Hierarchical Quick Shift (HQuickSHIFT)

Import the external libraries first. We also define the plotting and random-initialization functions and the global variables.

In [13]:
%reload_ext autoreload
%autoreload 2
%matplotlib inline

import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import glob
import os
import sys
import seaborn as sns
import sklearn
from sklearn.utils import shuffle
from sklearn.preprocessing import StandardScaler
import sklearn.datasets as data
import sklearn.cluster as cluster
import hdbscan
from QuickshiftPP import *
from MedoidShift_and_QuickShift.quick_shift_ import QuickShift
import networkx as nx
from mlpack import emst
from mlpack import fastmks
import umap

import torch
import fastai
from fastai.utils import *
from fastai.core import *
from fastai.text import *

# Device selection: use CUDA only when both requested and actually available.
USE_GPU = True
dtype = torch.float32  # default floating-point precision for torch tensors
use_cuda = torch.cuda.is_available()
if USE_GPU and use_cuda:
    device = torch.device('cuda')
else:
    device = torch.device('cpu')
print('using device:', device)

# For reproducibility: fixed default seed plus alternative seeds for repeated runs
seed = 1234
seed_lst = [0, 23, 42, 1234, 43210, 1133557799, 22446688, 123456789, 987654321, 86420]
def random_seed(seed_value, use_cuda):
    """Seed every source of randomness used in this notebook.

    Parameters
    ----------
    seed_value: int
        Seed applied to numpy, Python's ``random`` module and torch RNGs.
    use_cuda: bool
        When True, also seed all CUDA devices and force cuDNN into
        deterministic mode (disables benchmark autotuning, which trades
        some speed for bitwise reproducibility).
    """
    # Bug fix: `random` was only in scope via the fastai star-imports above;
    # import it explicitly so this function does not depend on that side effect.
    import random
    np.random.seed(seed_value)     # numpy (CPU)
    torch.manual_seed(seed_value)  # torch (CPU)
    random.seed(seed_value)        # Python stdlib
    if use_cuda:
        torch.cuda.manual_seed(seed_value)
        torch.cuda.manual_seed_all(seed_value)  # all GPU devices
        torch.backends.cudnn.deterministic = True  # needed for reproducibility
        torch.backends.cudnn.benchmark = False
# Seed all RNGs once at import time with the default seed.
random_seed(seed_value=seed, use_cuda=use_cuda)

# suppress scientific float notation in numpy array reprs
np.set_printoptions(precision=6, suppress=True)
# filter out informational, non-critical warnings
import warnings
# matplotlib warnings are a subclass of UserWarning, so this silences them too
warnings.simplefilter("ignore", category=UserWarning)

fig_size = (20, 16)  # default figure size (inches) for all cluster plots
plot_kwds = {'alpha' : 1, 's' : 40, 'linewidths':0}  # shared plt.scatter kwargs
def plot_clusters(data, labels=None, algorithm=None, args=None, kwds=None, plt_title=None, file_name=None):
    """Scatter-plot 2D data colored by cluster label.

    Either pass precomputed ``labels`` or a clustering ``algorithm`` class that
    is fitted on ``data``. QuickshiftPP gets special handling because it
    exposes ``memberships`` instead of ``fit_predict``. Points with label -1
    (noise) are drawn black.

    Parameters
    ----------
    data: array of shape (n_samples, n_features); only columns 0 and 1 are plotted
    labels: optional per-sample integer labels; ignored when ``algorithm`` is given
    algorithm: optional estimator class (not instance) used to compute labels
    args, kwds: positional/keyword arguments forwarded to ``algorithm``
    plt_title: currently unused (title drawing is commented out); kept for API compatibility
    file_name: when given, the figure is saved to this path at 200 dpi
    """
    plt.figure(figsize=fig_size)
    if algorithm is not None:
        if algorithm.__name__=='QuickshiftPP':
            # QuickshiftPP does not follow the sklearn fit_predict protocol
            model = algorithm(**kwds)
            model.fit(data)
            labels = model.memberships
        else:
            labels = algorithm(*args, **kwds).fit_predict(data)   
        # at least 8 palette entries so small label counts still get distinct hues
        max_n_labels = np.maximum(8, np.unique(labels).max() + 1)
#         import pdb;pdb.set_trace()
        palette = sns.hls_palette(max_n_labels, l=.3, s=.8) 
        # noise points (label -1) are rendered black
        node_color = [palette[x] if x >= 0 else (0.0, 0.0, 0.0) for x in labels]
#         plt.title('Clusters found by {} and the number of Clusters: {:d}'.format(\
#                  str(algorithm.__name__), np.unique(labels).max() + 1), fontsize=28)
        plt.setp(plt.gca().get_xticklabels(), visible=False)
        plt.setp(plt.gca().get_yticklabels(), visible=False)
        plt.tight_layout()
    else:
        if labels is not None:
            max_n_labels = np.maximum(8, np.unique(labels).max() + 1)
            palette = sns.hls_palette(max_n_labels, l=.3, s=.8) 
            node_color = [palette[x] if x >= 0 else (0.0, 0.0, 0.0) for x in labels]
        else:
            # no labels at all: plot everything in blue
            node_color = ['b']      
    plt.scatter(data.T[0], data.T[1], c=node_color, **plot_kwds)
#     plt.setp(plt.gca().get_xticklabels(), visible=False)
#     plt.setp(plt.gca().get_yticklabels(), visible=False)
    plt.tight_layout()
#     if plt_title is not None: plt.title(plt_title)
    if file_name is not None: plt.savefig(file_name, dpi=200, bbox_inches='tight')
using device: cuda

It is a good habit to list the versions of the libraries, both to allow the results to be reproduced and to check that the libraries are installed correctly. An environment.yml file is also provided for this purpose in the accompanying folder.

You can create a new environment:

  • conda env create -f environment.yml
In [14]:
# Record library versions for reproducibility (see also environment.yml).
# Every "<name> version:" label is left-padded to 17 characters, matching
# the original hand-aligned output exactly.
for _name, _module in [('Numpy', np), ('Sklearn', sklearn),
                       ('PyTorch', torch), ('Fastai', fastai), ('Umap', umap)]:
    print('%-17s%s' % (_name + ' version:', _module.__version__))
Numpy version:   1.16.4
Sklearn version: 0.21.2
PyTorch version: 1.1.0
Fastai version:  1.0.46
Umap version:    0.3.9

EuclideanMinimumSpanningTree class to estimate the bandwidth in the scope of Hierarchical Quick Shift (HQuickSHIFT):

In [15]:
class EuclideanMinimumSpanningTree(object):
    """Build a Euclidean minimum spanning tree with MLpack's
    Euclidean minimum spanning tree (emst) routine.

    The mean edge length of the EMST is used to derive the kernel bandwidth
    estimate consumed by HQuickSHIFT (see the ``bandwidth`` property).

    Parameters
    ----------
    data: float, required 
        Array of input data shape (n_samples, n_features)
    copy_all_inputs: bool, optional (default=True)
        Params for MLpack's Euclidean minimum spanning tree (emst) and 
        Fast Max-Kernel Search (FastMKS).
        If specified, all input parameters will be deep copied before the method 
        is run. This is useful for debugging problems where the input parameters 
        are being modified by the algorithm, but can slow down the code.
    verbose: bool, optional (default=False)
        Params for MLpack's Euclidean minimum spanning tree (emst).
        Display informational messages and the full list of parameters and 
        timers at the end of execution.
    leaf_size: int, optional (default=20)
        Leaf size in the kd-tree built by MLpack's Euclidean minimum spanning 
        tree (emst) routine. One-element leaves give the empirically best 
        performance, but at the cost of greater memory requirements.
    """
    def __init__(self, data, copy_all_inputs=True, verbose=False, leaf_size=20):
        self._data = data
        self._copy_all_inputs = copy_all_inputs
        self._verbose = verbose
        self._leaf_size = leaf_size
        # the tree and bandwidth are computed eagerly at construction time
        self._construct_graph()

    def _construct_graph(self):
        # emst returns an edge list; each row is used below as
        # [node_u, node_v, euclidean_distance].
        self._emst = emst(input=self._data, copy_all_inputs=self._copy_all_inputs,
                         verbose=self._verbose, leaf_size=self._leaf_size)['output']
        # Bandwidth estimate: (mean MST edge length)^(1/n_features) —
        # presumably following Sreevani & Murthy (2016); TODO confirm exponent choice.
        self._bandwidth = (np.mean(self._emst[:,2]))**(1/self._data.shape[1])

    def plot(self, axis=None, with_labels=False, node_size=40,
             node_color='k', edge_cmap=plt.cm.viridis, edge_width=2, colorbar=True):
        """Plot the minimum spanning tree (as projected into 2D by UMAP if required).

        Edges are colored by their Euclidean length; node positions are taken
        directly from the first two coordinates of the input data.

        Parameters
        ----------
        axis: matplotlib axis, optional (default=None)
               The axis to render the plot to
        with_labels: bool, optional (default=False)
               Set to True to draw labels on the nodes.
        node_size: int, optional (default 40)
                The size of nodes in the plot.
        node_color: matplotlib color spec, optional (default black)
                The color to render nodes.
        edge_cmap: matplotlib colormap, optional (default plt.cm.viridis)
                The colormap to color edges by (varying color by edge weight/distance). 
                Can be a cmap object.
        edge_width: float, optional (default 2)
                The linewidth to use for rendering edges.
        colorbar: bool, optional (default True)
                Whether to draw a colorbar.
        Returns
        -------
        axis : matplotlib axis
                The axis used to render the plot.
        """
        # rebuild the networkx view of the EMST on every call (cheap relative to drawing)
        self._G = nx.Graph()
        for row in self._emst:
            self._G.add_edge(row[0], row[1], weight=row[2])
        # node positions are the raw data coordinates
        data_dict = {index: tuple(row) for index, row in enumerate(self._data)}
        nx.set_node_attributes(self._G, data_dict, 'node_pos')
        if axis is None:
            axis = plt.gca()
        _, edge_weight = zip(*nx.get_edge_attributes(self._G,'weight').items())
        node_pos = nx.get_node_attributes(self._G, 'node_pos')
        # fix the color scale from 0 to the longest edge so the colorbar is meaningful
        edge_vmin =  0.0; edge_vmax = max(edge_weight)
        nx.draw_networkx(self._G, ax=axis, pos=node_pos, 
                         with_labels=with_labels, node_size=node_size, node_color=node_color,
                         width=edge_width, edge_color=edge_weight, edge_cmap=edge_cmap,
                         edge_vmin=edge_vmin, edge_vmax=edge_vmax) 
        if colorbar:
            # draw_networkx does not return a mappable; build one for the colorbar
            sm = plt.cm.ScalarMappable(cmap=edge_cmap,
                                       norm=plt.Normalize(vmin=edge_vmin, vmax=edge_vmax))
            sm.set_array([])
            cbar = plt.colorbar(sm, ax=axis)
            cbar.set_label('Euclidean distance', rotation=90)
#         plt.title('HQuickSHIFT Euclidean Minimum Spanning Tree, bandwidth: {:.2f}'.format(\
#                   self._bandwidth), fontsize=28)
        plt.setp(plt.gca().get_xticklabels(), visible=False)
        plt.setp(plt.gca().get_yticklabels(), visible=False)
        plt.tight_layout()
        return axis

    @property
    def bandwidth(self):
        # kernel bandwidth estimated from the EMST (see _construct_graph)
        return self._bandwidth 

    @property
    def emst_graph(self):
        # raw edge list [u, v, distance] as produced by mlpack's emst
        return self._emst  

MutualReachabilityMinimumSpanningTree class is used to determine density connected modes in the scope of Hierarchical Quick Shift (HQuickSHIFT):

In [16]:
from hdbscan.hdbscan_ import _hdbscan_boruvka_balltree, _hdbscan_boruvka_kdtree
class MutualReachabilityMinimumSpanningTree(object):
    """Build a mutual reachability minimum spanning tree with HDBSCAN's
    internals. Computes the internal minimum spanning tree given a data matrix.

    Parameters
    ----------
    data: float, required 
        Array of input data shape (n_samples, n_features)
    min_samples : int, optional (default=5)
        The number of samples in a neighborhood for a point (k-nn)
        to be considered as a core point. This includes the point itself.
    leaf_size: int, optional (default=20)
        Leaf size in the ball tree built by HDBSCAN.
    """
    def __init__(self, data, min_samples=5, leaf_size=20):
        self._data = data
        self._min_samples = min_samples
        self._leaf_size = leaf_size
        # the tree is computed eagerly at construction time
        self._construct_graph()

    def _construct_graph(self):
        # _hdbscan_boruvka_balltree returns a 2-tuple; only the second element
        # (the minimum spanning tree edge list, used below as [u, v, weight])
        # is kept — the first is discarded.
        (_, self._mrmst) = (_hdbscan_boruvka_balltree(self._data,
                                                      min_samples=self._min_samples,
                                                      leaf_size=self._leaf_size,
                                                      gen_min_span_tree=True))

    def plot(self, axis=None, with_labels=False, node_size=40,
             node_color='k', edge_cmap=plt.cm.viridis, edge_width=2, colorbar=True):
        """Plot the minimum spanning tree (as projected into 2D by UMAP if required).

        Edges are colored by mutual reachability distance; node positions are
        the first two coordinates of the input data.

        Parameters
        ----------
        axis: matplotlib axis, optional (default=None)
               The axis to render the plot to
        with_labels: bool, optional (default=False)
               Set to True to draw labels on the nodes.
        node_size: int, optional (default 40)
                The size of nodes in the plot.
        node_color: matplotlib color spec, optional (default black)
                The color to render nodes.
        edge_cmap: matplotlib colormap, optional (default plt.cm.viridis)
                The colormap to color edges by (varying color by edge weight/distance). 
                Can be a cmap object.
        edge_width: float, optional (default 2)
                The linewidth to use for rendering edges.
        colorbar: bool, optional (default True)
                Whether to draw a colorbar.
        Returns
        -------
        axis : matplotlib axis
                The axis used to render the plot.
        """
        # rebuild the networkx view of the MST on every call
        self._G = nx.Graph()
        for row in self._mrmst:
            self._G.add_edge(row[0], row[1], weight=row[2])
        # node positions are the raw data coordinates
        data_dict = {index: tuple(row) for index, row in enumerate(self._data)}
        nx.set_node_attributes(self._G, data_dict, 'node_pos')
        if axis is None:
            axis = plt.gca()
        _, edge_weight = zip(*nx.get_edge_attributes(self._G,'weight').items())
        node_pos = nx.get_node_attributes(self._G, 'node_pos')
        # fix the color scale from 0 to the largest edge weight
        edge_vmin =  0.0; edge_vmax = max(edge_weight)
        nx.draw_networkx(self._G, ax=axis, pos=node_pos, 
                         with_labels=with_labels, node_size=node_size, node_color=node_color,
                         width=edge_width, edge_color=edge_weight, edge_cmap=edge_cmap,
                         edge_vmin=edge_vmin, edge_vmax=edge_vmax) 
        if colorbar:
            # draw_networkx does not return a mappable; build one for the colorbar
            sm = plt.cm.ScalarMappable(cmap=edge_cmap,
                                       norm=plt.Normalize(vmin=edge_vmin, vmax=edge_vmax))
            sm.set_array([])
            cbar = plt.colorbar(sm, ax=axis)
            cbar.set_label('mutual reachability distance', rotation=90)
#         plt.title('Mutual Reachability Minimum Spanning Tree', fontsize=28)
        plt.setp(plt.gca().get_xticklabels(), visible=False)
        plt.setp(plt.gca().get_yticklabels(), visible=False)
        plt.tight_layout()
        return axis

    @property
    def mrmst_graph(self):
        # raw MST edge list [u, v, mutual_reachability_distance]
        return self._mrmst  

HQSHIFT is the main work horse and the implementation of the Hierarchical Quick Shift algorithm:

In [17]:
from sklearn.base import BaseEstimator, ClusterMixin
from hdbscan.hdbscan_ import _tree_to_labels
from hdbscan.plots import CondensedTree, SingleLinkageTree
from hdbscan._hdbscan_linkage import label
from scipy.sparse import csgraph, csr_matrix
from scipy.spatial import ConvexHull
     
def _find_nth_smallest(a, n):
    return np.partition(a, n-1)[n-1]

def rbf_kernel(d, bw):
    """Gaussian (RBF) kernel value for distance ``d`` and bandwidth ``bw``."""
    exponent = -(d ** 2) / (2 * bw ** 2)
    return np.exp(exponent)

def _extract_weight(elem):  return elem[2]['weight']

# Inherits from sklearn
class HQSHIFT(BaseEstimator, ClusterMixin):
    """Perform HQSHIFT clustering from vector array.

    HQSHIFT - Hierarchical Quick Shift
    Performs quick  shift over varying tau values and determines
    the result to find a clustering that gives the best stability.
    This allows HQSHIFT to find clusters of varying tau(s) and be 
    more robust to parameter selection.
    
    Some of the listed "Parameters" and "Attributes" are used 
    in the same manner as HDBSCAN. Differences are stated explicitly.
    
    Parameters
    ----------
    copy_all_inputs: bool, optional (default=True)
        Params for MLpack's Euclidian minimum spanning tree (emst) and 
        Fast Max-Kernel Search (FastMKS).
        If specified, all input parameters will be deep copied before the method 
        is run. This is useful for debugging problems where the input parameters 
        are being modified by the algorithm, but can slow down the code.
    verbose: bool, optional (default=False)
        Params for MLpack's Euclidian minimum spanning tree (emst) and 
        Fast Max-Kernel Search (FastMKS).
        Display informational messages and the full list of parameters and 
        timers at the end of execution.
    leaf_size: int, optional (default=20)
        Leaf size in the kd-tree built by MLpack's Euclidian minimum spanning 
        tree (emst) routine. One-element leaves give the empirically best 
        performance, but at the cost of greater memory requirements.
    kernel: str, optional (default=‘gaussian’)
        Kernel type to use in FastMKS: ‘gaussian’, ‘epanechnikov’, ‘triangular’ or 'cosine'.
    base: float, optional (default=2.0)
        Base to use during cover tree construction for MLpack's FastMKS.
    min_mode_size : int, optional (default=12)
        The minimum size of modal-set; single linkage splits that contain
        fewer points than this will be considered points "falling out" of a
        modal-set rather than a modal-set splitting into two new modal-sets.
    min_cluster_size : int, optional (default=60) 
        The minimum size of clusters; single linkage splits that contain
        fewer points than this will be considered points "falling out" of a
        cluster rather than a cluster splitting into two new clusters.
    min_samples : int, optional (default=5)
        The number of samples in a neighborhood for a point (k-nn)
        to be considered as a core point. This includes the point itself.
    cluster_selection_method : string, optional (default='eom')
        The method used to select clusters from the condensed tree. Similar to 
        the HDBSCAN*, HQSHIFT uses an "Excess of Mass" algorithm to find the most 
        persistent clusters. Alternatively you can instead select the clusters 
        at the leaves of the tree -- this provides the most fine grained and 
        homogeneous clusters. Routines imported directly from HDBSCAN. 
        Options are:
            * ``eom``
            * ``leaf``
    allow_single_cluster : bool, optional (default=False)
        Similar to HDBSCAN*, HQSHIFT by default will not produce a single cluster, 
        setting this to True will override this and allow single cluster results in
        the case that you feel this is a valid result for your dataset.
        
    Attributes
    ----------
    labels_ : ndarray, shape (n_samples, )
        Cluster labels for each point in the dataset given to fit().
        Noisy samples are given the label -1.
    probabilities_ : ndarray, shape (n_samples, )
        Differing from HDBSCAN*, HQSHIFT estimates the probability density
        of each sample with FastMKS using different kernels. 
    cluster_persistence_ : ndarray, shape (n_clusters, )
        A score of how persistent each cluster is. A score of 1.0 represents
        a perfectly stable cluster that persists over all distance scales,
        while a score of 0.0 represents a perfectly ephemeral cluster. These
        scores can be used to gauge the relative coherence of the clusters output
        by the algorithm.
    _condensed_tree : CondensedTree object
        The condensed tree produced by HDBSCAN. The object has methods
        for converting to pandas, networkx, and plotting.
    _single_linkage_tree : SingleLinkageTree object
        The single linkage tree produced by HDBSCAN. The object has methods
        for converting to pandas, networkx, and plotting.
    _emst : Euclidian MinimumSpanningTree object
        The euclidian minimum spanning tree  generated by HQSHIFT to estimate
        optimal kernel bandwidth. 
        
    References
    ----------
    .. [1] L. McInnes, J. Healy, S. Astels, hdbscan: Hierarchical density 
       based clustering In: Journal of Open Source Software, The Open Journal, 
       volume 2, number 11. 2017
    .. [2] Campello, R. J., Moulavi, D., & Sander, J. (2013, April).
       Density-based clustering based on hierarchical density estimates.
       In Pacific-Asia Conference on Knowledge Discovery and Data Mining
       (pp. 160-172). Springer Berlin Heidelberg.
    .. [3] Vedaldi A., Soatto S. (2008) Quick Shift and Kernel Methods for 
       Mode Seeking. In: Forsyth D., Torr P., Zisserman A. (eds) Computer Vision – ECCV 2008. 
       Lecture Notes in Computer Science, vol 5305. Springer, Berlin, Heidelberg.
    .. [4] March, William B. and Ram, Parikshit and Gray, Alexander G.
       Fast Euclidean minimum spanning tree: algorithm, analysis, and applications.
       Proceedings of the 16th ACM SIGKDD International Conference on Knowledge Discovery 
       and Data Mining (KDD '10), pp. 603-612, (2010), ACM
    .. [5] Curtin, Ryan R. and Ram, Parikshit and Gray, Alexander G. 
       Fast Exact Max-Kernel Search, Proceedings of the 2013 SIAM International Conference 
       on Data Mining (SDM '13), 2013, pp.1-9
    .. [6] Curtin, Ryan R. and Ram, Parikshit.
       Dual-tree fast exact max-kernel search, Statistical Analysis and Data Mining, vol 7,
       Wiley Subscription Services, Inc., A Wiley Company, pp. 229-253, 2014
    .. [7] Sreevani, C.A. Murthy.
       On bandwidth selection using minimal spanning tree for kernel density estimation,
       Computational Statistics and Data Analysis 102 (2016), pp.67–84
    """
    def __init__(self, copy_all_inputs=True, verbose=False,
                 leaf_size=20, kernel='gaussian', base=2.0,
                 min_cluster_size=60, min_mode_size=12, min_samples=5,
                 cluster_selection_method='eom', allow_single_cluster=False):
        """Store hyper-parameters only; no computation runs until hqshift() is called."""
        # MLpack options (shared by emst and FastMKS)
        self.copy_all_inputs = copy_all_inputs
        self.verbose = verbose
        self.leaf_size = leaf_size
        # FastMKS kernel configuration
        self.base = base
        self.kernel = kernel
        # HDBSCAN-style cluster-extraction parameters
        self.min_mode_size = min_mode_size
        self.min_cluster_size = min_cluster_size
        self.min_samples = min_samples
        self.cluster_selection_method = cluster_selection_method
        self.allow_single_cluster = allow_single_cluster
        
        # internal structures, built lazily by hqshift()
        self._emst = None
        self._mrmst = None
        self._condensed_tree = None
        self._single_linkage_tree = None
    
    def _compute_medoids(self, dist_matrix, weight_matrix=None):
        """For each sample, find the index of the closest strictly-denser sample.

        Returns (medoids, prob): ``medoids[i]`` is the quick-shift parent of
        point i (``medoids[i] == i`` marks a stationary point / mode) and
        ``prob`` is the per-sample density estimate (column sums of the
        kernel weight matrix).
        """
        if weight_matrix is None:
            # default density weights: Gaussian kernel at the EMST-estimated bandwidth
            weight_matrix = rbf_kernel(d=dist_matrix, bw=self._emst._bandwidth)
        P = np.sum(weight_matrix, axis=0)  # kernel density estimate per sample
        prob = P
        # P[j, i] = density(j) - density(i): sign tells whether j is denser than i
        P = P[:, None] - P
        # NOTE(review): this mutates the caller's dist_matrix in place (zero
        # distances replaced by max/2, so 1/dist below is finite and zero-distance
        # pairs are de-prioritized) — confirm callers expect the mutation.
        dist_matrix[dist_matrix==0] = np.max(dist_matrix)/2
        # S[j, i] > 0 only for denser j; magnitude favors the closest such j
        S = np.sign(P) * (1/dist_matrix)
        medoids = np.argmax(S, axis=0)
        return medoids, prob
    
    def _quick_shift(self, data, dist_matrix, weight_matrix=None): 
        """Follow medoid pointers from every sample to its mode.

        Returns
        -------
        cls_ctrs: coordinates of the stationary points (modes)
        labels: per-sample index of the mode each trajectory converges to
        stationary_idx: sample indices of the modes
        traj_lst: per-sample trajectory of visited indices, ending at a mode
        prob: per-sample density estimates from _compute_medoids
        """
        medoids,prob= self._compute_medoids(dist_matrix, weight_matrix)
        # stationary points are their own medoid (local density maxima)
        stationary_idx = []
        for i in range(len(medoids)):
            if medoids[i] == i:
                stationary_idx.append(i)
        cls_ctrs = data[stationary_idx]
        labels, traj_lst  = [],[]
        # map each stationary index to a dense 0..k-1 cluster label
        labels_val = {}
        lab = 0
        for i in stationary_idx:
            labels_val[i] = lab
            lab += 1
        for i in range(len(data)):
            # hop along medoid pointers until a stationary point is reached
            next_med = medoids[i]
            traj = []
            while next_med not in stationary_idx:
                traj.append(next_med.item())  # .item(): medoids entries are numpy ints
                next_med = medoids[next_med]
            traj.append(next_med.item())
            traj_lst.append(np.asarray(traj))
            labels.append(labels_val[next_med.item()])
        # NOTE(review): traj_lst is ragged, so np.asarray(traj_lst) yields an
        # object array on the numpy version pinned here — confirm on upgrades.
        return cls_ctrs, np.asarray(labels), np.asarray(stationary_idx),\
               np.asarray(traj_lst), np.asarray(prob)  
    
    def _quick_shift_fastmks(self, data):
        """Build a pseudo-distance matrix and kernel-weight matrix via FastMKS.

        Runs FastMKS with k = n_samples so every pairwise kernel evaluation is
        returned, re-aligns the (neighbor-ordered) kernel values into a square
        column-aligned matrix, then converts kernel values into pseudo
        distances via -log(kernel) scaled by the squared EMST bandwidth.

        Returns (dist_matrix, kernel_srt), both of shape (n_samples, n_samples).
        """
        # for each point in the query set, the k points in the reference set 
        # with maximum kernel evaluations are found
        output = fastmks(k=data.shape[0], reference=data, bandwidth=self._emst.bandwidth,
                         base=self.base, kernel=self.kernel,
                         copy_all_inputs=self.copy_all_inputs)
        indices = output['indices']
        kernels = output['kernels']
        # overwrite the last-neighbor column with each row's own index —
        # presumably to guarantee self appears in every neighbor list; TODO confirm
        indices[:,-1]=np.arange(indices.shape[0])
        sorted_indices = np.argsort(indices)
        # reorder kernel values so column j holds the kernel against sample j
        kernel_srt = np.array([[kernels[ri, col]] for ri,row in enumerate(sorted_indices) 
                                                  for col in row]).reshape(indices.shape)
        #small_number = _find_nth_smallest(np.ravel(kernel_srt),indices.shape[0]+1)
        # replace the diagonal (self-kernel) with the median kernel value so a
        # point is not its own nearest "denser" neighbor downstream
        small_number = np.median(kernel_srt)
        np.fill_diagonal(kernel_srt, small_number)
        # return a pseudo distance as the inverse of evaluations (val) of kernels
        #dist_matrix = np.sqrt(-log(kernel_srt+np.finfo(float).eps)*self._emst.bandwidth)*self._emst.bandwidth
        # NOTE(review): `log` is not defined in this cell; it leaks in via the
        # star imports above (presumably numpy's log) — confirm.
        dist_matrix = -log(kernel_srt+np.finfo(float).eps)*self._emst.bandwidth*self._emst.bandwidth
        return dist_matrix, kernel_srt
    
    def _construct_graph(self, X):
        """Build the hierarchical quick shift tree and extract both labelings.

        Pipeline: FastMKS pseudo-distances -> quick shift trajectories ->
        edge list along trajectories -> sparse MST -> single linkage ->
        mode-level labels (min_mode_size); then single linkage on the mutual
        reachability MST for the final cluster labels (min_cluster_size).
        Stores labels_, probabilities_, trees and their quick_shift_* twins.
        """
        dist_matrix, weight_matrix = self._quick_shift_fastmks(data=X)
        cls_ctrs, cls_labels, stat_idx, traj, prob =\
            self._quick_shift(data=X, dist_matrix=dist_matrix, weight_matrix=weight_matrix)
        self.paths = [path.tolist() for path in traj]
        # Hierarchical Quick Shift Tree: one weighted edge per trajectory hop
        # NOTE(review): `visited` is never used below
        visited = np.zeros(len(self.paths))
        self._hqst = []
        for i, pth in enumerate(self.paths):
            # no edges inserted between a stationary node and itself!
            # NOTE(review): only the FIRST stationary index is excluded here, so
            # other modes get self-loop edges; harmless in practice because the
            # MST computation below can never include a self-loop — confirm intended.
            if i != stat_idx[0]:
                # append the current node to the beginning of the path
                cpth = [i] + pth
                for j in np.arange(len(cpth)-1): 
                    self._hqst.append([cpth[j], cpth[j+1], dist_matrix[cpth[j], cpth[j+1]]])
        
        self._hqst = np.array(self._hqst)
        # sparse adjacency (rows/cols = sample indices, values = pseudo distances)
        _hqst_sparse = csr_matrix((self._hqst.T[2], (self._hqst.T[0], self._hqst.T[1])),
                                  shape=dist_matrix.shape) 
        # Compute the minimum spanning tree for the sparse graph
        self._hqst_mst = csgraph.minimum_spanning_tree(_hqst_sparse)
        # Convert the graph to scipy cluster array format [u, v, weight]
        nonzeros = self._hqst_mst.nonzero()
        nonzero_vals = self._hqst_mst[nonzeros]
        self._hqst_mst = np.vstack(nonzeros + (nonzero_vals,)).T
        # Sort edges of the min_spanning_tree by weight
        self._hqst_mst = np.asarray(self._hqst_mst[np.argsort(self._hqst_mst.T[2]),:][0])
        # Perform single linkage on the Hierarchical Quick Shift Minimum Spanning Tree
        sltree = label(self._hqst_mst)
        # mode-level (fine-grained) labeling, governed by min_mode_size
        (self.quick_shift_labels_, 
         self.quick_shift_probabilities_, 
         self.quick_shift_cluster_persistence, 
         self._quick_shift_condensed_tree, 
         self._quick_shift_single_linkage_tree) =\
        (_tree_to_labels(X=X, single_linkage_tree=sltree,
                         cluster_selection_method=self.cluster_selection_method,
                         allow_single_cluster=self.allow_single_cluster,
                         min_cluster_size=self.min_mode_size))
        # locate the root sample of every quick-shift sub-cluster
        self.find_idx_roots(X)
        # Perform single linkage on the Mutual Reachability Minimum Spanning Tree
        sltree = label(self._mrmst.mrmst_graph)
        # final cluster labeling, governed by min_cluster_size
        (self.labels_, 
         self.probabilities_, 
         self.cluster_persistence, 
         self._condensed_tree, 
         self._single_linkage_tree) =\
        (_tree_to_labels(X=X, single_linkage_tree=sltree,
                         cluster_selection_method=self.cluster_selection_method,
                         allow_single_cluster=self.allow_single_cluster,
                         min_cluster_size=self.min_cluster_size))
    def hqshift(self, X):
        """Run the full HQuickSHIFT pipeline on data matrix X.

        Validates hyper-parameters, builds the Euclidean MST (for bandwidth
        estimation) and the mutual reachability MST, then delegates to
        _construct_graph to compute labels_ and the related attributes.

        Parameters
        ----------
        X: array of shape (n_samples, n_features)

        Raises
        ------
        ValueError: if any hyper-parameter is invalid.
        """
        # sanity checks
        if self.leaf_size < 1:
            raise ValueError('leaf size shall be greater than 0')
        if self.kernel not in ['gaussian', 'epanechnikov', 'triangular', 'cosine']:
            # bug fix: the original message contained %s but never interpolated a value
            raise ValueError('Invalid kernel: %s\n'
                             'Shall be one of: "gaussian", "epanechnikov", "triangular", "cosine"\n'
                             % self.kernel)
        if type(self.min_samples) is not int or type(self.min_cluster_size) is not int:
            raise ValueError('Min samples and min cluster size must be integers!')
        if self.min_samples <= 0 or self.min_cluster_size <= 0:
            raise ValueError('min cluster size shall be positive integer')
        if self.cluster_selection_method not in ('eom', 'leaf'):
            # bug fix: interpolate the offending value into the message
            raise ValueError('Invalid cluster selection method: %s\n'
                             'Shall be one of: "eom", "leaf"\n'
                             % self.cluster_selection_method)

        # construct Euclidean Minimum Spanning Tree for bandwidth estimation;
        # the bandwidth is used by FastMKS for the Gaussian, Epanechnikov and
        # triangular kernels — it is the "spread" of the kernel.
        self._emst = EuclideanMinimumSpanningTree(data=X, copy_all_inputs=self.copy_all_inputs,
                                                  verbose=self.verbose, leaf_size=self.leaf_size)
        self._mrmst = MutualReachabilityMinimumSpanningTree(data=X, min_samples=self.min_samples,
                                                            leaf_size=self.leaf_size)
        self._construct_graph(X=X)
        
    def plot_mrmst(self, X, axis=None, with_labels=False, node_size=40, alpha=0.5):
        """Plot the mutual reachability minimum spanning tree with labels.

        Nodes are colored by the final cluster labels (``self.labels_``), with
        noise (-1) drawn black. Requires hqshift() to have been run first.

        Parameters
        ----------
        axis: matplotlib axis, optional (default=None)
               The axis to render the plot to
        with_labels: bool, optional (default=False)
               Set to True to draw labels on the nodes.
        node_size: int, optional (default 40)
                The size of nodes in the plot.
        alpha: float, the edge transparency (default 0.5)

        Returns
        -------
        axis : matplotlib axis
                The axis used to render the plot.
        """        
        # rebuild the networkx view of the mutual reachability MST
        self._G = nx.Graph()
        nodes = np.arange(X.shape[0])
        self._G.add_nodes_from(nodes)
        for row in self._mrmst.mrmst_graph:
            self._G.add_edge(row[0], row[1], weight=row[2])
        # node positions are the raw data coordinates
        data_dict = {index: tuple(row) for index, row in enumerate(X)}
        nx.set_node_attributes(self._G, data_dict, 'node_pos')
        if axis is None:
            axis = plt.gca()
        node_pos = nx.get_node_attributes(self._G, 'node_pos')
        
        # at least 8 palette entries so small label counts still get distinct hues
        max_n_labels = np.maximum(8, np.unique(self.labels_).max() + 1)
        palette = sns.hls_palette(max_n_labels, l=.3, s=.8) 
        # noise points (label -1) are rendered black
        node_color = [palette[x] if x >= 0 else (0.0, 0.0, 0.0) for x in self.labels_]
        nx.draw_networkx(self._G, ax=axis, pos=node_pos, with_labels=False, alpha=alpha, 
                         node_color=node_color, node_size=node_size)
#         plt.title('HQuickSHIFT Mutual Reachability Graph, number of Clusters: {:d}'.format(\
#                   np.unique(self.labels_).max() + 1), fontsize=28)
        plt.setp(plt.gca().get_xticklabels(), visible=False)
        plt.setp(plt.gca().get_yticklabels(), visible=False)
        return axis
    
    def find_idx_roots(self, X):
        """Locate the root (mode) sample index of every quick-shift sub-cluster.

        Builds a directed graph from the HQS minimum spanning tree and, for
        each quick-shift label (including noise, -1, when present), records
        the single node with out-degree 0 in that label's sub-graph — the
        attractor all trajectories of the sub-cluster converge to. Results
        are stored in ``self._idx_root`` (one entry per label in order).
        """
        self._Di_G = nx.DiGraph()
        nodes = np.arange(X.shape[0])
        self._Di_G.add_nodes_from(nodes)
        for row in self._hqst_mst:
            self._Di_G.add_edge(row[0], row[1], weight=row[2])
        # (removed: an unused data_dict of node positions was built here)

        total_n_clusters = np.max(self.quick_shift_labels_) + 1
        # prepend -1 to the label range only when noise is actually present
        cluster_range = np.arange(-1, total_n_clusters)\
            if -1 in self.quick_shift_labels_ else np.arange(total_n_clusters) 
        # Find root node of each subcluster including noise with a node (out_degree==0)
        self._idx_root = []
        for idx_clst in cluster_range:
            sub_nodes = np.where(self.quick_shift_labels_==idx_clst)[0]
            sub_G = nx.DiGraph(self._Di_G.subgraph(sub_nodes))
            idx_subg_root = list(node for node, out_degree in sub_G.out_degree() 
                                 if out_degree == 0)[0]
            self._idx_root.append(idx_subg_root)
        
    def plot(self, X, axis=None, with_labels=False, node_size=40, alpha=0.5, 
             plot_edge=True, plot_hull=True, file_name=None):
        """Plot the hierarchical quick shift tree with labels.

        Rebuilds the directed mode-attraction graph from ``self._hqst_mst``,
        colors nodes by ``self.labels_`` (noise, label -1, is black), marks
        the root (mode) of every quick-shift sub-cluster with a black 'X',
        and optionally draws edges and per-cluster convex hulls.

        Parameters
        ----------
        X : array of shape (n_samples, n_features)
                The clustered data; indices (X[i][0], X[i][1]) are used as
                2-D plot coordinates.
        axis: matplotlib axis, optional (default None)
               The axis to render the plot to (falls back to plt.gca()).
        with_labels: bool, optional (default False)
               Set to True to draw labels on the nodes.
               NOTE(review): currently ignored — draw_networkx below is
               always called with with_labels=False.
        node_size: int, optional (default 40)
                The size of nodes in the plot.
        alpha: float, the edge transparency (default 0.5)
        plot_edge: bool, optional (default True)
                When False, all attraction edges are removed before drawing.
        plot_hull: bool, optional (default True)
                When True, draw the convex hull of each sub-cluster's
                non-noise points as a dashed black outline.
        file_name: str or None, optional (default None)
                When given, the figure is also saved to this path.

        Returns
        -------
        axis : matplotlib axis
                The axis used the render the plot.
        """        
        # Rebuild the directed attraction graph: each row of _hqst_mst is an
        # edge (row[0] -> row[1]) with weight row[2]; edges point toward the
        # modes (roots are detected below as nodes with out_degree == 0).
        self._Di_G = nx.DiGraph()
        nodes = np.arange(X.shape[0])
        self._Di_G.add_nodes_from(nodes)
        for row in self._hqst_mst:
            self._Di_G.add_edge(row[0], row[1], weight=row[2])
        # Store each sample's 2-D coordinates as a node attribute for layout.
        data_dict = {index: tuple(row) for index, row in enumerate(X)}
        nx.set_node_attributes(self._Di_G, data_dict, 'node_pos')
        if axis is None:
            axis = plt.gca()
        node_pos = nx.get_node_attributes(self._Di_G, 'node_pos')
        #palette = plt.cm.tab10(np.linspace(0, 1, np.unique(self.labels_).max() + 1))
        #palette = sns.color_palette('deep', np.unique(self.labels_).max() + 1)
        # Palette of at least 8 hues so small label counts still get distinct
        # colors; noise points (label -1) are drawn black.
        max_n_labels = np.maximum(8, np.unique(self.labels_).max() + 1)
        palette = sns.hls_palette(max_n_labels, l=.3, s=.8) 
        node_color = [palette[x] if x >= 0 else (0.0, 0.0, 0.0) for x in self.labels_]
        
        # Iterate over quick-shift cluster ids, including -1 (noise) when present.
        total_n_clusters = np.max(self.quick_shift_labels_) + 1
        cluster_range = np.arange(-1, total_n_clusters)\
            if -1 in self.quick_shift_labels_ else np.arange(total_n_clusters) 
        # Find root node of each subcluster including noise with a node (out_degree==0)
        self._idx_root = []
        for idx_clst in cluster_range:
            sub_nodes = np.where(self.quick_shift_labels_==idx_clst)[0]
            sub_G = nx.DiGraph(self._Di_G.subgraph(sub_nodes))
            idx_subg_root = list(node for node, out_degree in sub_G.out_degree() 
                                 if out_degree == 0)[0]
            # Mark the sub-cluster root (local mode) with a black 'X'.
            axis.plot((X[idx_subg_root][0]), (X[idx_subg_root][1]), 'X', color='k', markersize=12)
            self._idx_root.append(idx_subg_root)
            if plot_hull:
                # Hull of the non-noise members only.
                # NOTE(review): ConvexHull needs >= 3 points — assumes every
                # sub-cluster has at least 3 non-noise members; confirm.
                node_labels = self.labels_[sub_nodes]
                points=X[sub_nodes][node_labels!=-1]
                hull = ConvexHull(points)
                for simplex in hull.simplices:
                    axis.plot(points[simplex, 0], points[simplex, 1], 'k--')      
        #self._Di_G.remove_edges_from(list(self._Di_G.edges(_idx_root)))
        #edge_weights = list(nx.get_edge_attributes(self._Di_G,'weight').values())
        # Uniform edge width (the weighted variant is kept commented out above).
        edge_weights = 2
        if not plot_edge:
            self._Di_G.remove_edges_from(list(self._Di_G.edges(nodes)))
        nx.draw_networkx(self._Di_G, ax=axis, pos=node_pos, with_labels=False, alpha=alpha,
                         node_color=node_color, node_size=node_size, width=edge_weights,
                         arrowsize=14, arrowstyle='->')
#         plt.title('HQSHIFT Mode Attraction Graph, # of Modes: {:d}, # of Clusters: {:d}'.format(\
#                   np.unique(self.quick_shift_labels_).max() + 1, np.unique(self.labels_).max() + 1),
#                   fontsize=28)
#         if X.shape[1] == 2:
#             plt.xlabel("$x_0$", fontsize=28)
#             plt.ylabel("$x_1$", fontsize=28, rotation=90)
        # Tick labels carry no information here; hide them.
        plt.setp(axis.get_xticklabels(), visible=False)
        plt.setp(axis.get_yticklabels(), visible=False)
        plt.tight_layout()
        if file_name is not None: plt.savefig(file_name, dpi=200, bbox_inches='tight')
        return axis

    def fit(self, X, y=None):
        """Run the HQSHIFT clustering pipeline on ``X``.

        Parameters
        ----------
        X : array of shape (n_samples, n_features)
            Feature matrix to cluster.
        y : ignored
            Present only for scikit-learn estimator API compatibility.

        Returns
        -------
        self : object
            The fitted estimator, enabling call chaining.
        """
        self.hqshift(X)
        return self
    
    def fit_predict(self, X, y=None):
        """Cluster ``X`` and return the per-sample cluster labels.

        Equivalent to calling :meth:`fit` followed by reading ``labels_``.

        Parameters
        ----------
        X : array of shape (n_samples, n_features)
            Feature matrix to cluster.
        y : ignored
            Present only for scikit-learn estimator API compatibility.

        Returns
        -------
        y : ndarray, shape (n_samples, )
            Cluster label assigned to each sample.
        """
        return self.fit(X).labels_
    
    @property
    def single_linkage_tree(self):
        """Wrapped single linkage tree; requires a prior :meth:`fit` call."""
        if self._single_linkage_tree is None:
            raise AttributeError('No single linkage tree was generated; try running fit'
                 ' first.')
        return SingleLinkageTree(self._single_linkage_tree)
    @property
    def condensed_tree(self):
        """Wrapped condensed tree; requires a prior :meth:`fit` call."""
        if self._condensed_tree is None:
            raise AttributeError('No condensed tree was generated; try running fit first.')
        return CondensedTree(self._condensed_tree,
                             self.cluster_selection_method,
                             self.allow_single_cluster)
            
    @property
    def quick_shift_condensed_tree(self):
        """Wrapped quick-shift condensed tree; requires a prior :meth:`fit` call."""
        if self._quick_shift_condensed_tree is None:
            raise AttributeError('No quickshift condensed tree was generated; try running fit first.')
        return CondensedTree(self._quick_shift_condensed_tree,
                             self.cluster_selection_method,
                             self.allow_single_cluster)

With a working example, we can demonstrate the features of the Hierarchical Quick Shift algorithm and its advantages over QuickshiftPP. First, we create a toy dataset:

Prepare Dataset for RNN

In [18]:
n_samples=350
n_noise_pts = 600
add_uniform_noise = True

def generate_data(n_samples, add_uniform_noise=add_uniform_noise,
                  plot_data=True, plt_title=None, file_name=None,
                  n_noise_pts=n_noise_pts):
    """Build the standardized toy dataset: two moons, two Gaussian blobs,
    four sheared blobs, and (optionally) uniform background noise.

    Parameters
    ----------
    n_samples : int
        Point count for the moons and the first blob pair; the sheared
        blobs use n_samples//3 or n_samples//4 points each.
    add_uniform_noise : bool, optional
        When True, append ``n_noise_pts`` uniformly distributed points
        (label -1) spanning the bounding box of the clean data.
    plot_data : bool, optional
        When True, plot the dataset with its ground-truth labels.
    plt_title : str, optional
        Title forwarded to ``plot_clusters``.
    file_name : str, optional
        Save path forwarded to ``plot_clusters``.
    n_noise_pts : int, optional
        Number of uniform noise points (default: module-level value).
        New backward-compatible parameter; previously a fixed global.

    Returns
    -------
    ndarray of shape (n_total, 2)
        The shuffled, standard-scaled data. Labels are only used for the
        optional plot and are not returned (matches the original API).
    """
    random_seed(seed_value=seed, use_cuda=use_cuda)
    moons, lbls_moons = data.make_moons(n_samples=n_samples, noise=0.05, random_state=seed)
    blobs, lbls_blobs = data.make_blobs(n_samples=n_samples, centers=[(-1.5, 3.0), (1.5, 3.0)],
                               cluster_std=[0.15, 0.25], random_state=seed)

    # Shear angles (degrees -> tangents) for the four small blobs.
    theta = np.radians([30, -30, -30, 30])
    t = np.tan(theta)

    def _sheared_blob(n, center, tan_theta, shear_lower):
        """Draw one isotropic blob (std 0.10) at `center` and shear it.

        shear_lower=True uses [[1, 0], [t, 1]].T (blobs 3/4);
        shear_lower=False uses [[1, t], [0, 1]].T (blobs 5/6).
        """
        blob, lbls = data.make_blobs(n_samples=n, centers=[center],
                                     cluster_std=[0.10], random_state=seed)
        if shear_lower:
            shear = np.array(((1, 0), (tan_theta, 1))).T
        else:
            shear = np.array(((1, tan_theta), (0, 1))).T
        return blob.dot(shear), lbls

    blob3, lbls_blob3 = _sheared_blob(n_samples//3, (-0.95, 0.75), t[0], True)
    blob4, lbls_blob4 = _sheared_blob(n_samples//4, (0.95, 0.75), t[1], True)
    blob5, lbls_blob5 = _sheared_blob(n_samples//4, (0.15, 0.0), t[2], False)
    blob6, lbls_blob6 = _sheared_blob(n_samples//3, (1.85, 0.0), t[3], False)

    test_data = np.vstack([moons, blobs, blob3, blob4, blob5, blob6])
    # Offset each component's labels so every cluster id is distinct.
    test_data_lbls = np.hstack([lbls_moons, lbls_blobs+2,
                                lbls_blob3+4, lbls_blob4+5, lbls_blob5+6, lbls_blob6+7])

    if add_uniform_noise:
        # Noise generation below assumes 2-D data.
        assert test_data.shape[1] == 2
        (min0, min1), (max_0, max_1) = np.min(test_data, axis=0), np.max(test_data, axis=0)
        noise_data = np.random.RandomState(seed).uniform([min0, min1], [max_0, max_1],
                                                         size=(n_noise_pts, 2))
        noise_lbls = np.full(n_noise_pts, -1)
        test_data = np.vstack([test_data, noise_data])
        test_data_lbls = np.hstack([test_data_lbls, noise_lbls])

    # Shuffle data and labels jointly so row/label correspondence is
    # guaranteed (the original shuffled them in two separate calls,
    # relying on identical random_state producing the same permutation).
    test_data, test_data_lbls = shuffle(test_data, test_data_lbls, random_state=seed)
    # Normalize the dataset for easier parameter selection downstream.
    test_data_std = StandardScaler().fit_transform(test_data)
    if plot_data:
        plot_clusters(data=test_data_std, labels=test_data_lbls,
                      file_name=file_name, plt_title=plt_title)
    return test_data_std
In [19]:
# Build the standardized toy dataset (with uniform noise) and plot the
# ground truth.
test_data = generate_data(n_samples=n_samples, add_uniform_noise=True,
                          file_name='blob_moon_dataset.png')
# Capture the axis limits of the ground-truth figure so later plots can
# reuse the same coordinate range for visual comparison.
curaxes = plt.gca()
ax_xlim = curaxes.get_xlim()
ax_ylim = curaxes.get_ylim()
In [20]:
# Baseline: cluster the same data with QuickshiftPP (parameters k and beta)
# and plot on the shared axis limits.
# NOTE: the duplicate `from QuickshiftPP import *` was removed — the setup
# cell at the top of the notebook already star-imports QuickshiftPP.
plot_clusters(data=test_data, algorithm=QuickshiftPP,
              args=(), kwds={'k':20, 'beta':.7}, file_name='blob_moon_qspp.png')
curaxes = plt.gca()
curaxes.set_xlim(ax_xlim)
curaxes.set_ylim(ax_ylim)
plt.show()
In [21]:
# Fit HQSHIFT on the toy data and draw its mode-attraction graph on the
# same axis limits as the ground-truth plot above.
hqshift = HQSHIFT(min_cluster_size=125,
                  min_mode_size=150,
                  min_samples=15,
                  allow_single_cluster=False).fit(test_data)
plt.figure(figsize=fig_size)
curaxes = plt.gca()
curaxes.set_xlim(ax_xlim)
curaxes.set_ylim(ax_ylim)
hqshift.plot(test_data, plot_edge=True, plot_hull=True, node_size=30, alpha=0.4,
             file_name='blob_moon_hqshift.png')
plt.show()
In [22]:
# Plot the Euclidean minimum spanning tree built during fitting and print
# the bandwidth stored on it (value shown below the cell).
plt.figure(figsize=fig_size)
hqshift._emst.plot()
print(hqshift._emst.bandwidth)
plt.savefig('emst.png', bbox_inches='tight')
plt.show()
0.20421907202880743
In [23]:
# Plot the mutual-reachability minimum spanning tree for comparison with
# the Euclidean MST above.
plt.figure(figsize=fig_size)
hqshift._mrmst.plot()
plt.savefig('mrmst.png', bbox_inches='tight')
plt.show()
In [24]:
# Condensed-tree view of the hierarchy with the selected clusters
# highlighted, using the same hue palette as the scatter plots.
plt.figure(figsize=(16,12))
max_n_labels = np.maximum(8, np.unique(hqshift.labels_).max() + 1)
palette = sns.hls_palette(max_n_labels, l=.3, s=.8) 
hqshift.condensed_tree.plot(select_clusters=True,
                            selection_palette=palette,
                        log_size=False)
# Tick labels carry no information for the tree; hide them.
plt.setp(plt.gca().get_xticklabels(), visible=False)
plt.setp(plt.gca().get_yticklabels(), visible=False)
plt.tight_layout()
plt.show() 
In [25]:
# Quick-shift condensed tree (clusters not highlighted here).
plt.figure(figsize=(16,12))
max_n_labels = np.maximum(8, np.unique(hqshift.labels_).max() + 1)
palette = sns.hls_palette(max_n_labels, l=.3, s=.8) 
hqshift.quick_shift_condensed_tree.plot(select_clusters=False,
                                        selection_palette=palette,
                                        log_size=False)
plt.setp(plt.gca().get_xticklabels(), visible=False)
plt.setp(plt.gca().get_yticklabels(), visible=False)
# Raise only the lower y bound to crop the uninformative bottom of the tree.
plt.ylim(0.025)
plt.tight_layout()
plt.show() 

Recurrent Neural Network (RNN)

  • HQuickShift restricts the steps to sample points (and thus is a sample-based version of Mean Shift).
  • What happens if we have a new single point to cluster? How to assign it to a mode? Re-run the HQuickShift?
  • Or find the nearest neighbor in the sample set and follow its path forward? Then we would need to store all data samples!
  • Samples follow common paths towards the local mode.
In [14]:
# Number of attraction paths and the root (mode) index of each cluster.
len(hqshift.paths), hqshift._idx_root
Out[14]:
(1706, [1178, 1479, 785, 968, 493, 1615])

Train a RNN from Scratch

In [15]:
# For each mode root, collect every attraction path that passes through it;
# each entry is the path's own index j followed by the path truncated just
# after the root.
clust_paths = [[j]+pth[:pth.index(idx)+1] for idx in hqshift._idx_root 
                                          for j, pth in enumerate(hqshift.paths) if idx in pth]
# Map each index path to its point coordinates (sequence data for the RNN).
clust_data = [test_data[pth] for pth in clust_paths] 
clust_paths 
Out[15]:
[[13, 627, 1309, 755, 1293, 628, 1690, 1281, 477, 765, 1459, 1178],
 [23,
  142,
  1471,
  13,
  627,
  1309,
  755,
  1293,
  628,
  1690,
  1281,
  477,
  765,
  1459,
  1178],
 [24,
  633,
  946,
  1703,
  1015,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [25, 323, 434, 691, 1329, 827, 1107, 147, 1163, 1178],
 [28,
  78,
  510,
  544,
  1253,
  1257,
  1415,
  1266,
  125,
  427,
  1381,
  509,
  1642,
  932,
  623,
  29,
  1036,
  551,
  970,
  198,
  1653,
  1131,
  306,
  1039,
  1325,
  1416,
  1459,
  1178],
 [29, 1036, 551, 970, 198, 1653, 1131, 306, 1039, 1325, 1416, 1459, 1178],
 [33,
  379,
  1447,
  1009,
  1699,
  264,
  1653,
  1131,
  306,
  1039,
  1325,
  1416,
  1459,
  1178],
 [38,
  395,
  267,
  174,
  1449,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [51,
  1344,
  840,
  838,
  165,
  670,
  288,
  25,
  323,
  434,
  691,
  1329,
  827,
  1107,
  147,
  1163,
  1178],
 [71, 705, 1085, 1437, 1175, 74, 477, 765, 1459, 1178],
 [73, 1163, 1178],
 [74, 477, 765, 1459, 1178],
 [77, 470, 1537, 907, 73, 1163, 1178],
 [78,
  510,
  544,
  1253,
  1257,
  1415,
  1266,
  125,
  427,
  1381,
  509,
  1642,
  932,
  623,
  29,
  1036,
  551,
  970,
  198,
  1653,
  1131,
  306,
  1039,
  1325,
  1416,
  1459,
  1178],
 [95,
  205,
  38,
  395,
  267,
  174,
  1449,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [97,
  1377,
  95,
  205,
  38,
  395,
  267,
  174,
  1449,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [114,
  432,
  314,
  221,
  594,
  1309,
  755,
  1293,
  628,
  1690,
  1281,
  477,
  765,
  1459,
  1178],
 [125,
  427,
  1381,
  509,
  1642,
  932,
  623,
  29,
  1036,
  551,
  970,
  198,
  1653,
  1131,
  306,
  1039,
  1325,
  1416,
  1459,
  1178],
 [131,
  1172,
  898,
  1417,
  1398,
  921,
  71,
  705,
  1085,
  1437,
  1175,
  74,
  477,
  765,
  1459,
  1178],
 [134, 765, 1459, 1178],
 [142, 1471, 13, 627, 1309, 755, 1293, 628, 1690, 1281, 477, 765, 1459, 1178],
 [147, 1163, 1178],
 [165, 670, 288, 25, 323, 434, 691, 1329, 827, 1107, 147, 1163, 1178],
 [167,
  23,
  142,
  1471,
  13,
  627,
  1309,
  755,
  1293,
  628,
  1690,
  1281,
  477,
  765,
  1459,
  1178],
 [174,
  1449,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [185,
  1015,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [198, 1653, 1131, 306, 1039, 1325, 1416, 1459, 1178],
 [200, 1140, 1673, 775, 293, 1688, 659, 1049, 471, 956, 1483, 1178],
 [201, 822, 1319, 884, 730, 259, 1497, 1700, 134, 765, 1459, 1178],
 [205,
  38,
  395,
  267,
  174,
  1449,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [220,
  524,
  830,
  221,
  594,
  1309,
  755,
  1293,
  628,
  1690,
  1281,
  477,
  765,
  1459,
  1178],
 [221, 594, 1309, 755, 1293, 628, 1690, 1281, 477, 765, 1459, 1178],
 [223, 1398, 921, 71, 705, 1085, 1437, 1175, 74, 477, 765, 1459, 1178],
 [240,
  1344,
  840,
  838,
  165,
  670,
  288,
  25,
  323,
  434,
  691,
  1329,
  827,
  1107,
  147,
  1163,
  1178],
 [249,
  505,
  1347,
  589,
  1080,
  288,
  25,
  323,
  434,
  691,
  1329,
  827,
  1107,
  147,
  1163,
  1178],
 [259, 1497, 1700, 134, 765, 1459, 1178],
 [261,
  505,
  1347,
  589,
  1080,
  288,
  25,
  323,
  434,
  691,
  1329,
  827,
  1107,
  147,
  1163,
  1178],
 [264, 1653, 1131, 306, 1039, 1325, 1416, 1459, 1178],
 [267,
  174,
  1449,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [283,
  972,
  1355,
  1062,
  131,
  1172,
  898,
  1417,
  1398,
  921,
  71,
  705,
  1085,
  1437,
  1175,
  74,
  477,
  765,
  1459,
  1178],
 [285,
  1591,
  371,
  142,
  1471,
  13,
  627,
  1309,
  755,
  1293,
  628,
  1690,
  1281,
  477,
  765,
  1459,
  1178],
 [288, 25, 323, 434, 691, 1329, 827, 1107, 147, 1163, 1178],
 [291,
  1565,
  379,
  1447,
  1009,
  1699,
  264,
  1653,
  1131,
  306,
  1039,
  1325,
  1416,
  1459,
  1178],
 [293, 1688, 659, 1049, 471, 956, 1483, 1178],
 [298, 1272, 1302, 1252, 1148, 1390, 1588, 659, 1049, 471, 956, 1483, 1178],
 [306, 1039, 1325, 1416, 1459, 1178],
 [307, 201, 822, 1319, 884, 730, 259, 1497, 1700, 134, 765, 1459, 1178],
 [314, 221, 594, 1309, 755, 1293, 628, 1690, 1281, 477, 765, 1459, 1178],
 [323, 434, 691, 1329, 827, 1107, 147, 1163, 1178],
 [329, 589, 1080, 288, 25, 323, 434, 691, 1329, 827, 1107, 147, 1163, 1178],
 [330,
  504,
  223,
  1398,
  921,
  71,
  705,
  1085,
  1437,
  1175,
  74,
  477,
  765,
  1459,
  1178],
 [340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [345, 1252, 1148, 1390, 1588, 659, 1049, 471, 956, 1483, 1178],
 [348,
  1560,
  925,
  1009,
  1699,
  264,
  1653,
  1131,
  306,
  1039,
  1325,
  1416,
  1459,
  1178],
 [364, 831, 1329, 827, 1107, 147, 1163, 1178],
 [368,
  605,
  767,
  1151,
  903,
  97,
  1377,
  95,
  205,
  38,
  395,
  267,
  174,
  1449,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [371,
  142,
  1471,
  13,
  627,
  1309,
  755,
  1293,
  628,
  1690,
  1281,
  477,
  765,
  1459,
  1178],
 [379, 1447, 1009, 1699, 264, 1653, 1131, 306, 1039, 1325, 1416, 1459, 1178],
 [381,
  910,
  205,
  38,
  395,
  267,
  174,
  1449,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [395,
  267,
  174,
  1449,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [401,
  731,
  51,
  1344,
  840,
  838,
  165,
  670,
  288,
  25,
  323,
  434,
  691,
  1329,
  827,
  1107,
  147,
  1163,
  1178],
 [407,
  510,
  544,
  1253,
  1257,
  1415,
  1266,
  125,
  427,
  1381,
  509,
  1642,
  932,
  623,
  29,
  1036,
  551,
  970,
  198,
  1653,
  1131,
  306,
  1039,
  1325,
  1416,
  1459,
  1178],
 [421,
  114,
  432,
  314,
  221,
  594,
  1309,
  755,
  1293,
  628,
  1690,
  1281,
  477,
  765,
  1459,
  1178],
 [427,
  1381,
  509,
  1642,
  932,
  623,
  29,
  1036,
  551,
  970,
  198,
  1653,
  1131,
  306,
  1039,
  1325,
  1416,
  1459,
  1178],
 [432, 314, 221, 594, 1309, 755, 1293, 628, 1690, 1281, 477, 765, 1459, 1178],
 [434, 691, 1329, 827, 1107, 147, 1163, 1178],
 [442,
  922,
  1586,
  965,
  635,
  261,
  505,
  1347,
  589,
  1080,
  288,
  25,
  323,
  434,
  691,
  1329,
  827,
  1107,
  147,
  1163,
  1178],
 [447,
  1151,
  903,
  97,
  1377,
  95,
  205,
  38,
  395,
  267,
  174,
  1449,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [448, 1497, 1700, 134, 765, 1459, 1178],
 [453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [470, 1537, 907, 73, 1163, 1178],
 [471, 956, 1483, 1178],
 [474,
  95,
  205,
  38,
  395,
  267,
  174,
  1449,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [477, 765, 1459, 1178],
 [500,
  896,
  777,
  421,
  114,
  432,
  314,
  221,
  594,
  1309,
  755,
  1293,
  628,
  1690,
  1281,
  477,
  765,
  1459,
  1178],
 [504, 223, 1398, 921, 71, 705, 1085, 1437, 1175, 74, 477, 765, 1459, 1178],
 [505,
  1347,
  589,
  1080,
  288,
  25,
  323,
  434,
  691,
  1329,
  827,
  1107,
  147,
  1163,
  1178],
 [508,
  633,
  946,
  1703,
  1015,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [509,
  1642,
  932,
  623,
  29,
  1036,
  551,
  970,
  198,
  1653,
  1131,
  306,
  1039,
  1325,
  1416,
  1459,
  1178],
 [510,
  544,
  1253,
  1257,
  1415,
  1266,
  125,
  427,
  1381,
  509,
  1642,
  932,
  623,
  29,
  1036,
  551,
  970,
  198,
  1653,
  1131,
  306,
  1039,
  1325,
  1416,
  1459,
  1178],
 [524, 830, 221, 594, 1309, 755, 1293, 628, 1690, 1281, 477, 765, 1459, 1178],
 [536,
  861,
  946,
  1703,
  1015,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [540,
  1565,
  379,
  1447,
  1009,
  1699,
  264,
  1653,
  1131,
  306,
  1039,
  1325,
  1416,
  1459,
  1178],
 [541,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [544,
  1253,
  1257,
  1415,
  1266,
  125,
  427,
  1381,
  509,
  1642,
  932,
  623,
  29,
  1036,
  551,
  970,
  198,
  1653,
  1131,
  306,
  1039,
  1325,
  1416,
  1459,
  1178],
 [545,
  1543,
  879,
  738,
  972,
  1355,
  1062,
  131,
  1172,
  898,
  1417,
  1398,
  921,
  71,
  705,
  1085,
  1437,
  1175,
  74,
  477,
  765,
  1459,
  1178],
 [551, 970, 198, 1653, 1131, 306, 1039, 1325, 1416, 1459, 1178],
 [562,
  767,
  1151,
  903,
  97,
  1377,
  95,
  205,
  38,
  395,
  267,
  174,
  1449,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [565,
  1103,
  909,
  1374,
  1055,
  1272,
  1302,
  1252,
  1148,
  1390,
  1588,
  659,
  1049,
  471,
  956,
  1483,
  1178],
 [568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [589, 1080, 288, 25, 323, 434, 691, 1329, 827, 1107, 147, 1163, 1178],
 [594, 1309, 755, 1293, 628, 1690, 1281, 477, 765, 1459, 1178],
 [596,
  536,
  861,
  946,
  1703,
  1015,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [604, 885, 448, 1497, 1700, 134, 765, 1459, 1178],
 [605,
  767,
  1151,
  903,
  97,
  1377,
  95,
  205,
  38,
  395,
  267,
  174,
  1449,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [609,
  868,
  1591,
  371,
  142,
  1471,
  13,
  627,
  1309,
  755,
  1293,
  628,
  1690,
  1281,
  477,
  765,
  1459,
  1178],
 [612,
  1228,
  474,
  95,
  205,
  38,
  395,
  267,
  174,
  1449,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [621, 1526, 835, 1562, 1329, 827, 1107, 147, 1163, 1178],
 [623, 29, 1036, 551, 970, 198, 1653, 1131, 306, 1039, 1325, 1416, 1459, 1178],
 [627, 1309, 755, 1293, 628, 1690, 1281, 477, 765, 1459, 1178],
 [628, 1690, 1281, 477, 765, 1459, 1178],
 [632,
  612,
  1228,
  474,
  95,
  205,
  38,
  395,
  267,
  174,
  1449,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [633,
  946,
  1703,
  1015,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [634, 822, 1319, 884, 730, 259, 1497, 1700, 134, 765, 1459, 1178],
 [635,
  261,
  505,
  1347,
  589,
  1080,
  288,
  25,
  323,
  434,
  691,
  1329,
  827,
  1107,
  147,
  1163,
  1178],
 [649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [651,
  681,
  1423,
  953,
  653,
  249,
  505,
  1347,
  589,
  1080,
  288,
  25,
  323,
  434,
  691,
  1329,
  827,
  1107,
  147,
  1163,
  1178],
 [653,
  249,
  505,
  1347,
  589,
  1080,
  288,
  25,
  323,
  434,
  691,
  1329,
  827,
  1107,
  147,
  1163,
  1178],
 [655,
  1475,
  1660,
  131,
  1172,
  898,
  1417,
  1398,
  921,
  71,
  705,
  1085,
  1437,
  1175,
  74,
  477,
  765,
  1459,
  1178],
 [656, 1175, 74, 477, 765, 1459, 1178],
 [659, 1049, 471, 956, 1483, 1178],
 [665, 1537, 907, 73, 1163, 1178],
 [670, 288, 25, 323, 434, 691, 1329, 827, 1107, 147, 1163, 1178],
 [675,
  740,
  442,
  922,
  1586,
  965,
  635,
  261,
  505,
  1347,
  589,
  1080,
  288,
  25,
  323,
  434,
  691,
  1329,
  827,
  1107,
  147,
  1163,
  1178],
 [681,
  1423,
  953,
  653,
  249,
  505,
  1347,
  589,
  1080,
  288,
  25,
  323,
  434,
  691,
  1329,
  827,
  1107,
  147,
  1163,
  1178],
 [685, 730, 259, 1497, 1700, 134, 765, 1459, 1178],
 [691, 1329, 827, 1107, 147, 1163, 1178],
 [693,
  1528,
  368,
  605,
  767,
  1151,
  903,
  97,
  1377,
  95,
  205,
  38,
  395,
  267,
  174,
  1449,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [699,
  693,
  1528,
  368,
  605,
  767,
  1151,
  903,
  97,
  1377,
  95,
  205,
  38,
  395,
  267,
  174,
  1449,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [705, 1085, 1437, 1175, 74, 477, 765, 1459, 1178],
 [707,
  220,
  524,
  830,
  221,
  594,
  1309,
  755,
  1293,
  628,
  1690,
  1281,
  477,
  765,
  1459,
  1178],
 [723, 1196, 77, 470, 1537, 907, 73, 1163, 1178],
 [724,
  283,
  972,
  1355,
  1062,
  131,
  1172,
  898,
  1417,
  1398,
  921,
  71,
  705,
  1085,
  1437,
  1175,
  74,
  477,
  765,
  1459,
  1178],
 [729,
  833,
  1611,
  761,
  1302,
  1252,
  1148,
  1390,
  1588,
  659,
  1049,
  471,
  956,
  1483,
  1178],
 [730, 259, 1497, 1700, 134, 765, 1459, 1178],
 [731,
  51,
  1344,
  840,
  838,
  165,
  670,
  288,
  25,
  323,
  434,
  691,
  1329,
  827,
  1107,
  147,
  1163,
  1178],
 [738,
  972,
  1355,
  1062,
  131,
  1172,
  898,
  1417,
  1398,
  921,
  71,
  705,
  1085,
  1437,
  1175,
  74,
  477,
  765,
  1459,
  1178],
 [740,
  442,
  922,
  1586,
  965,
  635,
  261,
  505,
  1347,
  589,
  1080,
  288,
  25,
  323,
  434,
  691,
  1329,
  827,
  1107,
  147,
  1163,
  1178],
 [755, 1293, 628, 1690, 1281, 477, 765, 1459, 1178],
 [761, 1302, 1252, 1148, 1390, 1588, 659, 1049, 471, 956, 1483, 1178],
 [765, 1459, 1178],
 [767,
  1151,
  903,
  97,
  1377,
  95,
  205,
  38,
  395,
  267,
  174,
  1449,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [775, 293, 1688, 659, 1049, 471, 956, 1483, 1178],
 [777,
  421,
  114,
  432,
  314,
  221,
  594,
  1309,
  755,
  1293,
  628,
  1690,
  1281,
  477,
  765,
  1459,
  1178],
 [786,
  635,
  261,
  505,
  1347,
  589,
  1080,
  288,
  25,
  323,
  434,
  691,
  1329,
  827,
  1107,
  147,
  1163,
  1178],
 [787,
  1004,
  1543,
  879,
  738,
  972,
  1355,
  1062,
  131,
  1172,
  898,
  1417,
  1398,
  921,
  71,
  705,
  1085,
  1437,
  1175,
  74,
  477,
  765,
  1459,
  1178],
 [788,
  381,
  910,
  205,
  38,
  395,
  267,
  174,
  1449,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [792,
  1600,
  1432,
  1263,
  1611,
  761,
  1302,
  1252,
  1148,
  1390,
  1588,
  659,
  1049,
  471,
  956,
  1483,
  1178],
 [808,
  1266,
  125,
  427,
  1381,
  509,
  1642,
  932,
  623,
  29,
  1036,
  551,
  970,
  198,
  1653,
  1131,
  306,
  1039,
  1325,
  1416,
  1459,
  1178],
 [822, 1319, 884, 730, 259, 1497, 1700, 134, 765, 1459, 1178],
 [827, 1107, 147, 1163, 1178],
 [830, 221, 594, 1309, 755, 1293, 628, 1690, 1281, 477, 765, 1459, 1178],
 [831, 1329, 827, 1107, 147, 1163, 1178],
 [833,
  1611,
  761,
  1302,
  1252,
  1148,
  1390,
  1588,
  659,
  1049,
  471,
  956,
  1483,
  1178],
 [835, 1562, 1329, 827, 1107, 147, 1163, 1178],
 [838, 165, 670, 288, 25, 323, 434, 691, 1329, 827, 1107, 147, 1163, 1178],
 [840,
  838,
  165,
  670,
  288,
  25,
  323,
  434,
  691,
  1329,
  827,
  1107,
  147,
  1163,
  1178],
 [848,
  24,
  633,
  946,
  1703,
  1015,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [861,
  946,
  1703,
  1015,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [868,
  1591,
  371,
  142,
  1471,
  13,
  627,
  1309,
  755,
  1293,
  628,
  1690,
  1281,
  477,
  765,
  1459,
  1178],
 [879,
  738,
  972,
  1355,
  1062,
  131,
  1172,
  898,
  1417,
  1398,
  921,
  71,
  705,
  1085,
  1437,
  1175,
  74,
  477,
  765,
  1459,
  1178],
 [882,
  1062,
  131,
  1172,
  898,
  1417,
  1398,
  921,
  71,
  705,
  1085,
  1437,
  1175,
  74,
  477,
  765,
  1459,
  1178],
 [884, 730, 259, 1497, 1700, 134, 765, 1459, 1178],
 [885, 448, 1497, 1700, 134, 765, 1459, 1178],
 [896,
  777,
  421,
  114,
  432,
  314,
  221,
  594,
  1309,
  755,
  1293,
  628,
  1690,
  1281,
  477,
  765,
  1459,
  1178],
 [898, 1417, 1398, 921, 71, 705, 1085, 1437, 1175, 74, 477, 765, 1459, 1178],
 [899,
  330,
  504,
  223,
  1398,
  921,
  71,
  705,
  1085,
  1437,
  1175,
  74,
  477,
  765,
  1459,
  1178],
 [903,
  97,
  1377,
  95,
  205,
  38,
  395,
  267,
  174,
  1449,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [907, 73, 1163, 1178],
 [909,
  1374,
  1055,
  1272,
  1302,
  1252,
  1148,
  1390,
  1588,
  659,
  1049,
  471,
  956,
  1483,
  1178],
 [910,
  205,
  38,
  395,
  267,
  174,
  1449,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [921, 71, 705, 1085, 1437, 1175, 74, 477, 765, 1459, 1178],
 [922,
  1586,
  965,
  635,
  261,
  505,
  1347,
  589,
  1080,
  288,
  25,
  323,
  434,
  691,
  1329,
  827,
  1107,
  147,
  1163,
  1178],
 [924,
  632,
  612,
  1228,
  474,
  95,
  205,
  38,
  395,
  267,
  174,
  1449,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [925, 1009, 1699, 264, 1653, 1131, 306, 1039, 1325, 1416, 1459, 1178],
 [932,
  623,
  29,
  1036,
  551,
  970,
  198,
  1653,
  1131,
  306,
  1039,
  1325,
  1416,
  1459,
  1178],
 [945, 1087, 634, 822, 1319, 884, 730, 259, 1497, 1700, 134, 765, 1459, 1178],
 [946,
  1703,
  1015,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [948,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [953,
  653,
  249,
  505,
  1347,
  589,
  1080,
  288,
  25,
  323,
  434,
  691,
  1329,
  827,
  1107,
  147,
  1163,
  1178],
 [956, 1483, 1178],
 [965,
  635,
  261,
  505,
  1347,
  589,
  1080,
  288,
  25,
  323,
  434,
  691,
  1329,
  827,
  1107,
  147,
  1163,
  1178],
 [970, 198, 1653, 1131, 306, 1039, 1325, 1416, 1459, 1178],
 [972,
  1355,
  1062,
  131,
  1172,
  898,
  1417,
  1398,
  921,
  71,
  705,
  1085,
  1437,
  1175,
  74,
  477,
  765,
  1459,
  1178],
 [975,
  731,
  51,
  1344,
  840,
  838,
  165,
  670,
  288,
  25,
  323,
  434,
  691,
  1329,
  827,
  1107,
  147,
  1163,
  1178],
 [1004,
  1543,
  879,
  738,
  972,
  1355,
  1062,
  131,
  1172,
  898,
  1417,
  1398,
  921,
  71,
  705,
  1085,
  1437,
  1175,
  74,
  477,
  765,
  1459,
  1178],
 [1009, 1699, 264, 1653, 1131, 306, 1039, 1325, 1416, 1459, 1178],
 [1012, 1085, 1437, 1175, 74, 477, 765, 1459, 1178],
 [1015,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [1029, 665, 1537, 907, 73, 1163, 1178],
 [1036, 551, 970, 198, 1653, 1131, 306, 1039, 1325, 1416, 1459, 1178],
 [1039, 1325, 1416, 1459, 1178],
 [1043, 1379, 621, 1526, 835, 1562, 1329, 827, 1107, 147, 1163, 1178],
 [1047, 1173, 1665, 1352, 656, 1175, 74, 477, 765, 1459, 1178],
 [1049, 471, 956, 1483, 1178],
 [1055, 1272, 1302, 1252, 1148, 1390, 1588, 659, 1049, 471, 956, 1483, 1178],
 [1058,
  1652,
  632,
  612,
  1228,
  474,
  95,
  205,
  38,
  395,
  267,
  174,
  1449,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [1062,
  131,
  1172,
  898,
  1417,
  1398,
  921,
  71,
  705,
  1085,
  1437,
  1175,
  74,
  477,
  765,
  1459,
  1178],
 [1070,
  1151,
  903,
  97,
  1377,
  95,
  205,
  38,
  395,
  267,
  174,
  1449,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [1080, 288, 25, 323, 434, 691, 1329, 827, 1107, 147, 1163, 1178],
 [1081, 1537, 907, 73, 1163, 1178],
 [1085, 1437, 1175, 74, 477, 765, 1459, 1178],
 [1087, 634, 822, 1319, 884, 730, 259, 1497, 1700, 134, 765, 1459, 1178],
 [1103,
  909,
  1374,
  1055,
  1272,
  1302,
  1252,
  1148,
  1390,
  1588,
  659,
  1049,
  471,
  956,
  1483,
  1178],
 [1107, 147, 1163, 1178],
 [1131, 306, 1039, 1325, 1416, 1459, 1178],
 [1140, 1673, 775, 293, 1688, 659, 1049, 471, 956, 1483, 1178],
 [1143,
  174,
  1449,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [1148, 1390, 1588, 659, 1049, 471, 956, 1483, 1178],
 [1151,
  903,
  97,
  1377,
  95,
  205,
  38,
  395,
  267,
  174,
  1449,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [1160, 1392, 1390, 1588, 659, 1049, 471, 956, 1483, 1178],
 [1163, 1178],
 [1172,
  898,
  1417,
  1398,
  921,
  71,
  705,
  1085,
  1437,
  1175,
  74,
  477,
  765,
  1459,
  1178],
 [1173, 1665, 1352, 656, 1175, 74, 477, 765, 1459, 1178],
 [1175, 74, 477, 765, 1459, 1178],
 [1187,
  1642,
  932,
  623,
  29,
  1036,
  551,
  970,
  198,
  1653,
  1131,
  306,
  1039,
  1325,
  1416,
  1459,
  1178],
 [1196, 77, 470, 1537, 907, 73, 1163, 1178],
 [1216,
  1576,
  1558,
  899,
  330,
  504,
  223,
  1398,
  921,
  71,
  705,
  1085,
  1437,
  1175,
  74,
  477,
  765,
  1459,
  1178],
 [1228,
  474,
  95,
  205,
  38,
  395,
  267,
  174,
  1449,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [1231, 1043, 1379, 621, 1526, 835, 1562, 1329, 827, 1107, 147, 1163, 1178],
 [1234,
  97,
  1377,
  95,
  205,
  38,
  395,
  267,
  174,
  1449,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [1252, 1148, 1390, 1588, 659, 1049, 471, 956, 1483, 1178],
 [1253,
  1257,
  1415,
  1266,
  125,
  427,
  1381,
  509,
  1642,
  932,
  623,
  29,
  1036,
  551,
  970,
  198,
  1653,
  1131,
  306,
  1039,
  1325,
  1416,
  1459,
  1178],
 [1257,
  1415,
  1266,
  125,
  427,
  1381,
  509,
  1642,
  932,
  623,
  29,
  1036,
  551,
  970,
  198,
  1653,
  1131,
  306,
  1039,
  1325,
  1416,
  1459,
  1178],
 [1263,
  1611,
  761,
  1302,
  1252,
  1148,
  1390,
  1588,
  659,
  1049,
  471,
  956,
  1483,
  1178],
 [1266,
  125,
  427,
  1381,
  509,
  1642,
  932,
  623,
  29,
  1036,
  551,
  970,
  198,
  1653,
  1131,
  306,
  1039,
  1325,
  1416,
  1459,
  1178],
 [1272, 1302, 1252, 1148, 1390, 1588, 659, 1049, 471, 956, 1483, 1178],
 [1275,
  1360,
  707,
  220,
  524,
  830,
  221,
  594,
  1309,
  755,
  1293,
  628,
  1690,
  1281,
  477,
  765,
  1459,
  1178],
 [1281, 477, 765, 1459, 1178],
 [1292,
  407,
  510,
  544,
  1253,
  1257,
  1415,
  1266,
  125,
  427,
  1381,
  509,
  1642,
  932,
  623,
  29,
  1036,
  551,
  970,
  198,
  1653,
  1131,
  306,
  1039,
  1325,
  1416,
  1459,
  1178],
 [1293, 628, 1690, 1281, 477, 765, 1459, 1178],
 [1301,
  1638,
  1360,
  707,
  220,
  524,
  830,
  221,
  594,
  1309,
  755,
  1293,
  628,
  1690,
  1281,
  477,
  765,
  1459,
  1178],
 [1302, 1252, 1148, 1390, 1588, 659, 1049, 471, 956, 1483, 1178],
 [1309, 755, 1293, 628, 1690, 1281, 477, 765, 1459, 1178],
 [1319, 884, 730, 259, 1497, 1700, 134, 765, 1459, 1178],
 [1325, 1416, 1459, 1178],
 [1327, 723, 1196, 77, 470, 1537, 907, 73, 1163, 1178],
 [1329, 827, 1107, 147, 1163, 1178],
 [1344,
  840,
  838,
  165,
  670,
  288,
  25,
  323,
  434,
  691,
  1329,
  827,
  1107,
  147,
  1163,
  1178],
 [1347, 589, 1080, 288, 25, 323, 434, 691, 1329, 827, 1107, 147, 1163, 1178],
 [1352, 656, 1175, 74, 477, 765, 1459, 1178],
 [1355,
  1062,
  131,
  1172,
  898,
  1417,
  1398,
  921,
  71,
  705,
  1085,
  1437,
  1175,
  74,
  477,
  765,
  1459,
  1178],
 [1360,
  707,
  220,
  524,
  830,
  221,
  594,
  1309,
  755,
  1293,
  628,
  1690,
  1281,
  477,
  765,
  1459,
  1178],
 [1374,
  1055,
  1272,
  1302,
  1252,
  1148,
  1390,
  1588,
  659,
  1049,
  471,
  956,
  1483,
  1178],
 [1377,
  95,
  205,
  38,
  395,
  267,
  174,
  1449,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [1379, 621, 1526, 835, 1562, 1329, 827, 1107, 147, 1163, 1178],
 [1381,
  509,
  1642,
  932,
  623,
  29,
  1036,
  551,
  970,
  198,
  1653,
  1131,
  306,
  1039,
  1325,
  1416,
  1459,
  1178],
 [1385,
  792,
  1600,
  1432,
  1263,
  1611,
  761,
  1302,
  1252,
  1148,
  1390,
  1588,
  659,
  1049,
  471,
  956,
  1483,
  1178],
 [1389, 1131, 306, 1039, 1325, 1416, 1459, 1178],
 [1390, 1588, 659, 1049, 471, 956, 1483, 1178],
 [1392, 1390, 1588, 659, 1049, 471, 956, 1483, 1178],
 [1398, 921, 71, 705, 1085, 1437, 1175, 74, 477, 765, 1459, 1178],
 [1415,
  1266,
  125,
  427,
  1381,
  509,
  1642,
  932,
  623,
  29,
  1036,
  551,
  970,
  198,
  1653,
  1131,
  306,
  1039,
  1325,
  1416,
  1459,
  1178],
 [1416, 1459, 1178],
 [1417, 1398, 921, 71, 705, 1085, 1437, 1175, 74, 477, 765, 1459, 1178],
 [1423,
  953,
  653,
  249,
  505,
  1347,
  589,
  1080,
  288,
  25,
  323,
  434,
  691,
  1329,
  827,
  1107,
  147,
  1163,
  1178],
 [1432,
  1263,
  1611,
  761,
  1302,
  1252,
  1148,
  1390,
  1588,
  659,
  1049,
  471,
  956,
  1483,
  1178],
 [1437, 1175, 74, 477, 765, 1459, 1178],
 [1440, 1673, 775, 293, 1688, 659, 1049, 471, 956, 1483, 1178],
 [1447, 1009, 1699, 264, 1653, 1131, 306, 1039, 1325, 1416, 1459, 1178],
 [1449,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [1459, 1178],
 [1465,
  777,
  421,
  114,
  432,
  314,
  221,
  594,
  1309,
  755,
  1293,
  628,
  1690,
  1281,
  477,
  765,
  1459,
  1178],
 [1469, 921, 71, 705, 1085, 1437, 1175, 74, 477, 765, 1459, 1178],
 [1471, 13, 627, 1309, 755, 1293, 628, 1690, 1281, 477, 765, 1459, 1178],
 [1474,
  285,
  1591,
  371,
  142,
  1471,
  13,
  627,
  1309,
  755,
  1293,
  628,
  1690,
  1281,
  477,
  765,
  1459,
  1178],
 [1475,
  1660,
  131,
  1172,
  898,
  1417,
  1398,
  921,
  71,
  705,
  1085,
  1437,
  1175,
  74,
  477,
  765,
  1459,
  1178],
 [1476,
  924,
  632,
  612,
  1228,
  474,
  95,
  205,
  38,
  395,
  267,
  174,
  1449,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [1483, 1178],
 [1497, 1700, 134, 765, 1459, 1178],
 [1524,
  541,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [1526, 835, 1562, 1329, 827, 1107, 147, 1163, 1178],
 [1528,
  368,
  605,
  767,
  1151,
  903,
  97,
  1377,
  95,
  205,
  38,
  395,
  267,
  174,
  1449,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [1532,
  975,
  731,
  51,
  1344,
  840,
  838,
  165,
  670,
  288,
  25,
  323,
  434,
  691,
  1329,
  827,
  1107,
  147,
  1163,
  1178],
 [1537, 907, 73, 1163, 1178],
 [1543,
  879,
  738,
  972,
  1355,
  1062,
  131,
  1172,
  898,
  1417,
  1398,
  921,
  71,
  705,
  1085,
  1437,
  1175,
  74,
  477,
  765,
  1459,
  1178],
 [1548, 1160, 1392, 1390, 1588, 659, 1049, 471, 956, 1483, 1178],
 [1551,
  910,
  205,
  38,
  395,
  267,
  174,
  1449,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [1558,
  899,
  330,
  504,
  223,
  1398,
  921,
  71,
  705,
  1085,
  1437,
  1175,
  74,
  477,
  765,
  1459,
  1178],
 [1559, 775, 293, 1688, 659, 1049, 471, 956, 1483, 1178],
 [1560, 925, 1009, 1699, 264, 1653, 1131, 306, 1039, 1325, 1416, 1459, 1178],
 [1562, 1329, 827, 1107, 147, 1163, 1178],
 [1565,
  379,
  1447,
  1009,
  1699,
  264,
  1653,
  1131,
  306,
  1039,
  1325,
  1416,
  1459,
  1178],
 [1573,
  447,
  1151,
  903,
  97,
  1377,
  95,
  205,
  38,
  395,
  267,
  174,
  1449,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [1576,
  1558,
  899,
  330,
  504,
  223,
  1398,
  921,
  71,
  705,
  1085,
  1437,
  1175,
  74,
  477,
  765,
  1459,
  1178],
 [1586,
  965,
  635,
  261,
  505,
  1347,
  589,
  1080,
  288,
  25,
  323,
  434,
  691,
  1329,
  827,
  1107,
  147,
  1163,
  1178],
 [1588, 659, 1049, 471, 956, 1483, 1178],
 [1591,
  371,
  142,
  1471,
  13,
  627,
  1309,
  755,
  1293,
  628,
  1690,
  1281,
  477,
  765,
  1459,
  1178],
 [1599,
  401,
  731,
  51,
  1344,
  840,
  838,
  165,
  670,
  288,
  25,
  323,
  434,
  691,
  1329,
  827,
  1107,
  147,
  1163,
  1178],
 [1600,
  1432,
  1263,
  1611,
  761,
  1302,
  1252,
  1148,
  1390,
  1588,
  659,
  1049,
  471,
  956,
  1483,
  1178],
 [1604, 604, 885, 448, 1497, 1700, 134, 765, 1459, 1178],
 [1607,
  740,
  442,
  922,
  1586,
  965,
  635,
  261,
  505,
  1347,
  589,
  1080,
  288,
  25,
  323,
  434,
  691,
  1329,
  827,
  1107,
  147,
  1163,
  1178],
 [1611, 761, 1302, 1252, 1148, 1390, 1588, 659, 1049, 471, 956, 1483, 1178],
 [1638,
  1360,
  707,
  220,
  524,
  830,
  221,
  594,
  1309,
  755,
  1293,
  628,
  1690,
  1281,
  477,
  765,
  1459,
  1178],
 [1642,
  932,
  623,
  29,
  1036,
  551,
  970,
  198,
  1653,
  1131,
  306,
  1039,
  1325,
  1416,
  1459,
  1178],
 [1652,
  632,
  612,
  1228,
  474,
  95,
  205,
  38,
  395,
  267,
  174,
  1449,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [1653, 1131, 306, 1039, 1325, 1416, 1459, 1178],
 [1658, 200, 1140, 1673, 775, 293, 1688, 659, 1049, 471, 956, 1483, 1178],
 [1660,
  131,
  1172,
  898,
  1417,
  1398,
  921,
  71,
  705,
  1085,
  1437,
  1175,
  74,
  477,
  765,
  1459,
  1178],
 [1665, 1352, 656, 1175, 74, 477, 765, 1459, 1178],
 [1673, 775, 293, 1688, 659, 1049, 471, 956, 1483, 1178],
 [1688, 659, 1049, 471, 956, 1483, 1178],
 [1690, 1281, 477, 765, 1459, 1178],
 [1699, 264, 1653, 1131, 306, 1039, 1325, 1416, 1459, 1178],
 [1700, 134, 765, 1459, 1178],
 [1703,
  1015,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178],
 [1,
  431,
  1116,
  795,
  1342,
  1072,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479],
 [9, 180, 1616, 1578, 215, 1626, 1479],
 [11,
  416,
  1648,
  1115,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479],
 [16,
  445,
  502,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [17, 92, 1008, 412, 1656, 37, 334, 718, 978, 1479],
 [31, 1090, 265, 674, 949, 715, 1587, 1353, 1468, 1479],
 [37, 334, 718, 978, 1479],
 [39, 44, 1580, 1136, 1349, 657, 754, 1512, 303, 1479],
 [40, 17, 92, 1008, 412, 1656, 37, 334, 718, 978, 1479],
 [43, 674, 949, 715, 1587, 1353, 1468, 1479],
 [44, 1580, 1136, 1349, 657, 754, 1512, 303, 1479],
 [69,
  1177,
  995,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [72, 467, 294, 153, 846, 1554, 1283, 1018, 1650, 1618, 1353, 1468, 1479],
 [79,
  711,
  312,
  338,
  716,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [92, 1008, 412, 1656, 37, 334, 718, 978, 1479],
 [93, 871, 577, 1349, 657, 754, 1512, 303, 1479],
 [94,
  1155,
  814,
  1248,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [112,
  1116,
  795,
  1342,
  1072,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479],
 [117, 666, 1223, 1445, 325, 215, 1626, 1479],
 [137,
  69,
  1177,
  995,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [144,
  214,
  11,
  416,
  1648,
  1115,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479],
 [146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [149,
  714,
  1358,
  312,
  338,
  716,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [153, 846, 1554, 1283, 1018, 1650, 1618, 1353, 1468, 1479],
 [155, 942, 994, 1654, 1634, 497, 400, 666, 1223, 1445, 325, 215, 1626, 1479],
 [164, 254, 1428, 1468, 1479],
 [180, 1616, 1578, 215, 1626, 1479],
 [209,
  94,
  1155,
  814,
  1248,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [214,
  11,
  416,
  1648,
  1115,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479],
 [215, 1626, 1479],
 [226, 490, 671, 253, 657, 754, 1512, 303, 1479],
 [227,
  714,
  1358,
  312,
  338,
  716,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [235,
  502,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [243,
  814,
  1248,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [253, 657, 754, 1512, 303, 1479],
 [254, 1428, 1468, 1479],
 [265, 674, 949, 715, 1587, 1353, 1468, 1479],
 [271,
  312,
  338,
  716,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [294, 153, 846, 1554, 1283, 1018, 1650, 1618, 1353, 1468, 1479],
 [300,
  519,
  732,
  149,
  714,
  1358,
  312,
  338,
  716,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [303, 1479],
 [312,
  338,
  716,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [318, 615, 1046, 671, 253, 657, 754, 1512, 303, 1479],
 [325, 215, 1626, 1479],
 [331, 1678, 1463, 1114, 1650, 1618, 1353, 1468, 1479],
 [333,
  346,
  1674,
  491,
  689,
  797,
  1220,
  546,
  1071,
  1,
  431,
  1116,
  795,
  1342,
  1072,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479],
 [334, 718, 978, 1479],
 [338,
  716,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [346,
  1674,
  491,
  689,
  797,
  1220,
  546,
  1071,
  1,
  431,
  1116,
  795,
  1342,
  1072,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479],
 [347,
  300,
  519,
  732,
  149,
  714,
  1358,
  312,
  338,
  716,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [363,
  144,
  214,
  11,
  416,
  1648,
  1115,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479],
 [376,
  626,
  1546,
  1113,
  530,
  1,
  431,
  1116,
  795,
  1342,
  1072,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479],
 [389,
  725,
  489,
  1628,
  877,
  1384,
  473,
  563,
  318,
  615,
  1046,
  671,
  253,
  657,
  754,
  1512,
  303,
  1479],
 [391, 226, 490, 671, 253, 657, 754, 1512, 303, 1479],
 [400, 666, 1223, 1445, 325, 215, 1626, 1479],
 [408, 1654, 1634, 497, 400, 666, 1223, 1445, 325, 215, 1626, 1479],
 [412, 1656, 37, 334, 718, 978, 1479],
 [416,
  1648,
  1115,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479],
 [420, 331, 1678, 1463, 1114, 1650, 1618, 1353, 1468, 1479],
 [422, 931, 994, 1654, 1634, 497, 400, 666, 1223, 1445, 325, 215, 1626, 1479],
 [425,
  809,
  1547,
  11,
  416,
  1648,
  1115,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479],
 [426, 1618, 1353, 1468, 1479],
 [431,
  1116,
  795,
  1342,
  1072,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479],
 [445,
  502,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [462, 226, 490, 671, 253, 657, 754, 1512, 303, 1479],
 [465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [467, 294, 153, 846, 1554, 1283, 1018, 1650, 1618, 1353, 1468, 1479],
 [473, 563, 318, 615, 1046, 671, 253, 657, 754, 1512, 303, 1479],
 [476,
  1625,
  973,
  1278,
  920,
  227,
  714,
  1358,
  312,
  338,
  716,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [489,
  1628,
  877,
  1384,
  473,
  563,
  318,
  615,
  1046,
  671,
  253,
  657,
  754,
  1512,
  303,
  1479],
 [490, 671, 253, 657, 754, 1512, 303, 1479],
 [491,
  689,
  797,
  1220,
  546,
  1071,
  1,
  431,
  1116,
  795,
  1342,
  1072,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479],
 [496, 855, 978, 1479],
 [497, 400, 666, 1223, 1445, 325, 215, 1626, 1479],
 [502,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [518,
  1174,
  363,
  144,
  214,
  11,
  416,
  1648,
  1115,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479],
 [519,
  732,
  149,
  714,
  1358,
  312,
  338,
  716,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [530,
  1,
  431,
  1116,
  795,
  1342,
  1072,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479],
 [532,
  243,
  814,
  1248,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [546,
  1071,
  1,
  431,
  1116,
  795,
  1342,
  1072,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479],
 [563, 318, 615, 1046, 671, 253, 657, 754, 1512, 303, 1479],
 [577, 1349, 657, 754, 1512, 303, 1479],
 [608,
  227,
  714,
  1358,
  312,
  338,
  716,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [615, 1046, 671, 253, 657, 754, 1512, 303, 1479],
 [622, 39, 44, 1580, 1136, 1349, 657, 754, 1512, 303, 1479],
 [626,
  1546,
  1113,
  530,
  1,
  431,
  1116,
  795,
  1342,
  1072,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479],
 [643,
  1038,
  112,
  1116,
  795,
  1342,
  1072,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479],
 [657, 754, 1512, 303, 1479],
 [664, 72, 467, 294, 153, 846, 1554, 1283, 1018, 1650, 1618, 1353, 1468, 1479],
 [666, 1223, 1445, 325, 215, 1626, 1479],
 [668,
  1167,
  608,
  227,
  714,
  1358,
  312,
  338,
  716,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [671, 253, 657, 754, 1512, 303, 1479],
 [674, 949, 715, 1587, 1353, 1468, 1479],
 [687,
  1174,
  363,
  144,
  214,
  11,
  416,
  1648,
  1115,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479],
 [689,
  797,
  1220,
  546,
  1071,
  1,
  431,
  1116,
  795,
  1342,
  1072,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479],
 [704,
  1102,
  1625,
  973,
  1278,
  920,
  227,
  714,
  1358,
  312,
  338,
  716,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [711,
  312,
  338,
  716,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [714,
  1358,
  312,
  338,
  716,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [715, 1587, 1353, 1468, 1479],
 [716,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [718, 978, 1479],
 [725,
  489,
  1628,
  877,
  1384,
  473,
  563,
  318,
  615,
  1046,
  671,
  253,
  657,
  754,
  1512,
  303,
  1479],
 [732,
  149,
  714,
  1358,
  312,
  338,
  716,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [742, 1587, 1353, 1468, 1479],
 [745,
  1066,
  608,
  227,
  714,
  1358,
  312,
  338,
  716,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [754, 1512, 303, 1479],
 [757,
  626,
  1546,
  1113,
  530,
  1,
  431,
  1116,
  795,
  1342,
  1072,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479],
 [771,
  333,
  346,
  1674,
  491,
  689,
  797,
  1220,
  546,
  1071,
  1,
  431,
  1116,
  795,
  1342,
  1072,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479],
 [795,
  1342,
  1072,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479],
 [797,
  1220,
  546,
  1071,
  1,
  431,
  1116,
  795,
  1342,
  1072,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479],
 [803, 1702, 846, 1554, 1283, 1018, 1650, 1618, 1353, 1468, 1479],
 [809,
  1547,
  11,
  416,
  1648,
  1115,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479],
 [814,
  1248,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [823, 43, 674, 949, 715, 1587, 1353, 1468, 1479],
 [828,
  917,
  530,
  1,
  431,
  1116,
  795,
  1342,
  1072,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479],
 [846, 1554, 1283, 1018, 1650, 1618, 1353, 1468, 1479],
 [855, 978, 1479],
 [871, 577, 1349, 657, 754, 1512, 303, 1479],
 [872, 1046, 671, 253, 657, 754, 1512, 303, 1479],
 [877, 1384, 473, 563, 318, 615, 1046, 671, 253, 657, 754, 1512, 303, 1479],
 [905,
  1278,
  920,
  227,
  714,
  1358,
  312,
  338,
  716,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [913,
  363,
  144,
  214,
  11,
  416,
  1648,
  1115,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479],
 [917,
  530,
  1,
  431,
  1116,
  795,
  1342,
  1072,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479],
 [920,
  227,
  714,
  1358,
  312,
  338,
  716,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [927, 1123, 1451, 1223, 1445, 325, 215, 1626, 1479],
 [928, 1626, 1479],
 [929,
  1066,
  608,
  227,
  714,
  1358,
  312,
  338,
  716,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [931, 994, 1654, 1634, 497, 400, 666, 1223, 1445, 325, 215, 1626, 1479],
 [938,
  1579,
  1072,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479],
 [942, 994, 1654, 1634, 497, 400, 666, 1223, 1445, 325, 215, 1626, 1479],
 [944,
  1354,
  1274,
  1270,
  1181,
  1076,
  408,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [949, 715, 1587, 1353, 1468, 1479],
 [952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [973,
  1278,
  920,
  227,
  714,
  1358,
  312,
  338,
  716,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [978, 1479],
 [987,
  1674,
  491,
  689,
  797,
  1220,
  546,
  1071,
  1,
  431,
  1116,
  795,
  1342,
  1072,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479],
 [988,
  1546,
  1113,
  530,
  1,
  431,
  1116,
  795,
  1342,
  1072,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479],
 [994, 1654, 1634, 497, 400, 666, 1223, 1445, 325, 215, 1626, 1479],
 [995, 942, 994, 1654, 1634, 497, 400, 666, 1223, 1445, 325, 215, 1626, 1479],
 [1008, 412, 1656, 37, 334, 718, 978, 1479],
 [1018, 1650, 1618, 1353, 1468, 1479],
 [1034, 1662, 426, 1618, 1353, 1468, 1479],
 [1038,
  112,
  1116,
  795,
  1342,
  1072,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479],
 [1046, 671, 253, 657, 754, 1512, 303, 1479],
 [1066,
  608,
  227,
  714,
  1358,
  312,
  338,
  716,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [1067, 153, 846, 1554, 1283, 1018, 1650, 1618, 1353, 1468, 1479],
 [1071,
  1,
  431,
  1116,
  795,
  1342,
  1072,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479],
 [1072, 1552, 803, 1702, 846, 1554, 1283, 1018, 1650, 1618, 1353, 1468, 1479],
 [1075,
  1579,
  1072,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479],
 [1076, 408, 1654, 1634, 497, 400, 666, 1223, 1445, 325, 215, 1626, 1479],
 [1090, 265, 674, 949, 715, 1587, 1353, 1468, 1479],
 [1097,
  1210,
  300,
  519,
  732,
  149,
  714,
  1358,
  312,
  338,
  716,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [1099,
  209,
  94,
  1155,
  814,
  1248,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [1102,
  1625,
  973,
  1278,
  920,
  227,
  714,
  1358,
  312,
  338,
  716,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [1110, 872, 1046, 671, 253, 657, 754, 1512, 303, 1479],
 [1113,
  530,
  1,
  431,
  1116,
  795,
  1342,
  1072,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479],
 [1114, 1650, 1618, 1353, 1468, 1479],
 [1115, 1552, 803, 1702, 846, 1554, 1283, 1018, 1650, 1618, 1353, 1468, 1479],
 [1116,
  795,
  1342,
  1072,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479],
 [1121, 1286, 1288, 1618, 1353, 1468, 1479],
 [1123, 1451, 1223, 1445, 325, 215, 1626, 1479],
 [1136, 1349, 657, 754, 1512, 303, 1479],
 [1142, 253, 657, 754, 1512, 303, 1479],
 [1150,
  1354,
  1274,
  1270,
  1181,
  1076,
  408,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [1152, 1320, 117, 666, 1223, 1445, 325, 215, 1626, 1479],
 [1155,
  814,
  1248,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [1161, 622, 39, 44, 1580, 1136, 1349, 657, 754, 1512, 303, 1479],
 [1167,
  608,
  227,
  714,
  1358,
  312,
  338,
  716,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [1169,
  1038,
  112,
  1116,
  795,
  1342,
  1072,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479],
 [1174,
  363,
  144,
  214,
  11,
  416,
  1648,
  1115,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479],
 [1177,
  995,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [1181,
  1076,
  408,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [1185,
  1686,
  1248,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [1203,
  1177,
  995,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [1210,
  300,
  519,
  732,
  149,
  714,
  1358,
  312,
  338,
  716,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [1220,
  546,
  1071,
  1,
  431,
  1116,
  795,
  1342,
  1072,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479],
 [1223, 1445, 325, 215, 1626, 1479],
 [1224, 1110, 872, 1046, 671, 253, 657, 754, 1512, 303, 1479],
 [1236, 1260, 1678, 1463, 1114, 1650, 1618, 1353, 1468, 1479],
 [1248,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [1260, 1678, 1463, 1114, 1650, 1618, 1353, 1468, 1479],
 [1270,
  1181,
  1076,
  408,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [1274,
  1270,
  1181,
  1076,
  408,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [1278,
  920,
  227,
  714,
  1358,
  312,
  338,
  716,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [1283, 1018, 1650, 1618, 1353, 1468, 1479],
 [1286, 1288, 1618, 1353, 1468, 1479],
 [1287, 1445, 325, 215, 1626, 1479],
 [1288, 1618, 1353, 1468, 1479],
 [1320, 117, 666, 1223, 1445, 325, 215, 1626, 1479],
 [1322, 823, 43, 674, 949, 715, 1587, 1353, 1468, 1479],
 [1336, 294, 153, 846, 1554, 1283, 1018, 1650, 1618, 1353, 1468, 1479],
 [1342,
  1072,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479],
 [1349, 657, 754, 1512, 303, 1479],
 [1353, 1468, 1479],
 [1354,
  1274,
  1270,
  1181,
  1076,
  408,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [1358,
  312,
  338,
  716,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [1368,
  757,
  626,
  1546,
  1113,
  530,
  1,
  431,
  1116,
  795,
  1342,
  1072,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479],
 [1370,
  1368,
  757,
  626,
  1546,
  1113,
  530,
  1,
  431,
  1116,
  795,
  1342,
  1072,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479],
 [1384, 473, 563, 318, 615, 1046, 671, 253, 657, 754, 1512, 303, 1479],
 [1422,
  1167,
  608,
  227,
  714,
  1358,
  312,
  338,
  716,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [1428, 1468, 1479],
 [1442, 412, 1656, 37, 334, 718, 978, 1479],
 [1445, 325, 215, 1626, 1479],
 [1451, 1223, 1445, 325, 215, 1626, 1479],
 [1463, 1114, 1650, 1618, 1353, 1468, 1479],
 [1464, 303, 1479],
 [1468, 1479],
 [1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [1512, 303, 1479],
 [1518,
  913,
  363,
  144,
  214,
  11,
  416,
  1648,
  1115,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479],
 [1523, 1463, 1114, 1650, 1618, 1353, 1468, 1479],
 [1546,
  1113,
  530,
  1,
  431,
  1116,
  795,
  1342,
  1072,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479],
 [1547,
  11,
  416,
  1648,
  1115,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479],
 [1552, 803, 1702, 846, 1554, 1283, 1018, 1650, 1618, 1353, 1468, 1479],
 [1554, 1283, 1018, 1650, 1618, 1353, 1468, 1479],
 [1577, 1662, 426, 1618, 1353, 1468, 1479],
 [1578, 215, 1626, 1479],
 [1579,
  1072,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479],
 [1580, 1136, 1349, 657, 754, 1512, 303, 1479],
 [1587, 1353, 1468, 1479],
 [1612,
  271,
  312,
  338,
  716,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [1614,
  905,
  1278,
  920,
  227,
  714,
  1358,
  312,
  338,
  716,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [1616, 1578, 215, 1626, 1479],
 [1618, 1353, 1468, 1479],
 [1625,
  973,
  1278,
  920,
  227,
  714,
  1358,
  312,
  338,
  716,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [1626, 1479],
 [1628,
  877,
  1384,
  473,
  563,
  318,
  615,
  1046,
  671,
  253,
  657,
  754,
  1512,
  303,
  1479],
 [1634, 497, 400, 666, 1223, 1445, 325, 215, 1626, 1479],
 [1646, 496, 855, 978, 1479],
 [1648,
  1115,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479],
 [1650, 1618, 1353, 1468, 1479],
 [1654, 1634, 497, 400, 666, 1223, 1445, 325, 215, 1626, 1479],
 [1656, 37, 334, 718, 978, 1479],
 [1662, 426, 1618, 1353, 1468, 1479],
 [1674,
  491,
  689,
  797,
  1220,
  546,
  1071,
  1,
  431,
  1116,
  795,
  1342,
  1072,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479],
 [1678, 1463, 1114, 1650, 1618, 1353, 1468, 1479],
 [1679,
  687,
  1174,
  363,
  144,
  214,
  11,
  416,
  1648,
  1115,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479],
 [1686,
  1248,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479],
 [1702, 846, 1554, 1283, 1018, 1650, 1618, 1353, 1468, 1479],
 [0, 388, 91, 998, 770, 157, 354, 997, 1590, 136, 963, 785],
 [1,
  431,
  1116,
  795,
  1342,
  1072,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [2,
  923,
  417,
  811,
  867,
  943,
  284,
  461,
  393,
  1657,
  686,
  478,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [3,
  1022,
  403,
  1575,
  1594,
  876,
  962,
  1190,
  1341,
  273,
  581,
  1606,
  132,
  1170,
  104,
  1555,
  1536,
  747,
  1290,
  1411,
  1042,
  444,
  916,
  392,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [4,
  396,
  696,
  1211,
  772,
  793,
  375,
  191,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [5, 53, 560, 1461, 464, 152, 1516, 963, 785],
 [6,
  579,
  1037,
  818,
  859,
  1154,
  883,
  488,
  567,
  1695,
  912,
  1025,
  850,
  301,
  1232,
  1162,
  993,
  272,
  575,
  246,
  1125,
  1331,
  815,
  1002,
  851,
  1135,
  154,
  580,
  398,
  1597,
  203,
  1601,
  380,
  255,
  559,
  1254,
  959,
  1251,
  313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [7,
  382,
  1490,
  756,
  67,
  1643,
  437,
  692,
  252,
  280,
  281,
  1279,
  844,
  461,
  393,
  1657,
  686,
  478,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [8,
  1063,
  1280,
  81,
  1106,
  650,
  538,
  1098,
  103,
  468,
  1467,
  1585,
  1141,
  1159,
  20,
  418,
  933,
  1671,
  102,
  1630,
  299,
  1386,
  501,
  522,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [9, 180, 1616, 1578, 215, 1626, 1479, 82, 1318, 415, 1516, 963, 785],
 [10,
  1001,
  976,
  486,
  700,
  183,
  217,
  1108,
  836,
  1307,
  304,
  1490,
  756,
  67,
  1643,
  437,
  692,
  252,
  280,
  281,
  1279,
  844,
  461,
  393,
  1657,
  686,
  478,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [11,
  416,
  1648,
  1115,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [12, 460, 276, 645, 99, 560, 1461, 464, 152, 1516, 963, 785],
 [13,
  627,
  1309,
  755,
  1293,
  628,
  1690,
  1281,
  477,
  765,
  1459,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [14,
  595,
  1337,
  602,
  232,
  1448,
  70,
  1035,
  494,
  600,
  1017,
  1305,
  1348,
  864,
  1386,
  501,
  522,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [15,
  1040,
  958,
  1583,
  1241,
  712,
  526,
  590,
  941,
  750,
  900,
  1496,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [16,
  445,
  502,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [17,
  92,
  1008,
  412,
  1656,
  37,
  334,
  718,
  978,
  1479,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [18,
  1235,
  1581,
  1158,
  1553,
  168,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [19,
  177,
  1250,
  1157,
  1054,
  642,
  578,
  652,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [20,
  418,
  933,
  1671,
  102,
  1630,
  299,
  1386,
  501,
  522,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [21,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [22,
  1191,
  1641,
  663,
  1430,
  1202,
  411,
  1126,
  57,
  593,
  108,
  1458,
  47,
  1696,
  1675,
  120,
  1531,
  170,
  1000,
  553,
  1581,
  1158,
  1553,
  168,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [23,
  142,
  1471,
  13,
  627,
  1309,
  755,
  1293,
  628,
  1690,
  1281,
  477,
  765,
  1459,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [24,
  633,
  946,
  1703,
  1015,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [25,
  323,
  434,
  691,
  1329,
  827,
  1107,
  147,
  1163,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [26, 202, 977, 1402, 783, 1296, 739, 82, 1318, 415, 1516, 963, 785],
 [27,
  286,
  591,
  926,
  111,
  567,
  1695,
  912,
  1025,
  850,
  301,
  1232,
  1162,
  993,
  272,
  575,
  246,
  1125,
  1331,
  815,
  1002,
  851,
  1135,
  154,
  580,
  398,
  1597,
  203,
  1601,
  380,
  255,
  559,
  1254,
  959,
  1251,
  313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [28,
  78,
  510,
  544,
  1253,
  1257,
  1415,
  1266,
  125,
  427,
  1381,
  509,
  1642,
  932,
  623,
  29,
  1036,
  551,
  970,
  198,
  1653,
  1131,
  306,
  1039,
  1325,
  1416,
  1459,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [29,
  1036,
  551,
  970,
  198,
  1653,
  1131,
  306,
  1039,
  1325,
  1416,
  1459,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [30,
  926,
  111,
  567,
  1695,
  912,
  1025,
  850,
  301,
  1232,
  1162,
  993,
  272,
  575,
  246,
  1125,
  1331,
  815,
  1002,
  851,
  1135,
  154,
  580,
  398,
  1597,
  203,
  1601,
  380,
  255,
  559,
  1254,
  959,
  1251,
  313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [31,
  1090,
  265,
  674,
  949,
  715,
  1587,
  1353,
  1468,
  1479,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [32,
  816,
  402,
  1441,
  1332,
  971,
  1244,
  1007,
  22,
  1191,
  1641,
  663,
  1430,
  1202,
  411,
  1126,
  57,
  593,
  108,
  1458,
  47,
  1696,
  1675,
  120,
  1531,
  170,
  1000,
  553,
  1581,
  1158,
  1553,
  168,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [33,
  379,
  1447,
  1009,
  1699,
  264,
  1653,
  1131,
  306,
  1039,
  1325,
  1416,
  1459,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [34,
  1504,
  1192,
  482,
  768,
  7,
  382,
  1490,
  756,
  67,
  1643,
  437,
  692,
  252,
  280,
  281,
  1279,
  844,
  461,
  393,
  1657,
  686,
  478,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [35,
  625,
  15,
  1040,
  958,
  1583,
  1241,
  712,
  526,
  590,
  941,
  750,
  900,
  1496,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [36,
  195,
  1383,
  598,
  969,
  1605,
  1249,
  625,
  15,
  1040,
  958,
  1583,
  1241,
  712,
  526,
  590,
  941,
  750,
  900,
  1496,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [37, 334, 718, 978, 1479, 82, 1318, 415, 1516, 963, 785],
 [38,
  395,
  267,
  174,
  1449,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [39,
  44,
  1580,
  1136,
  1349,
  657,
  754,
  1512,
  303,
  1479,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [40,
  17,
  92,
  1008,
  412,
  1656,
  37,
  334,
  718,
  978,
  1479,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [41,
  359,
  107,
  1093,
  624,
  1435,
  573,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [42,
  484,
  1592,
  1365,
  1603,
  372,
  452,
  332,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [43, 674, 949, 715, 1587, 1353, 1468, 1479, 82, 1318, 415, 1516, 963, 785],
 [44,
  1580,
  1136,
  1349,
  657,
  754,
  1512,
  303,
  1479,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [45,
  222,
  1425,
  197,
  779,
  686,
  478,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [46,
  688,
  1273,
  1668,
  570,
  980,
  1472,
  457,
  163,
  1592,
  1365,
  1603,
  372,
  452,
  332,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [47,
  1696,
  1675,
  120,
  1531,
  170,
  1000,
  553,
  1581,
  1158,
  1553,
  168,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [50, 1153, 194, 0, 388, 91, 998, 770, 157, 354, 997, 1590, 136, 963, 785],
 [51,
  1344,
  840,
  838,
  165,
  670,
  288,
  25,
  323,
  434,
  691,
  1329,
  827,
  1107,
  147,
  1163,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [52,
  791,
  1640,
  207,
  1525,
  1505,
  852,
  225,
  1584,
  1500,
  5,
  53,
  560,
  1461,
  464,
  152,
  1516,
  963,
  785],
 [53, 560, 1461, 464, 152, 1516, 963, 785],
 [54,
  1247,
  1443,
  869,
  1154,
  883,
  488,
  567,
  1695,
  912,
  1025,
  850,
  301,
  1232,
  1162,
  993,
  272,
  575,
  246,
  1125,
  1331,
  815,
  1002,
  851,
  1135,
  154,
  580,
  398,
  1597,
  203,
  1601,
  380,
  255,
  559,
  1254,
  959,
  1251,
  313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [55,
  983,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [56,
  1670,
  1262,
  1135,
  154,
  580,
  398,
  1597,
  203,
  1601,
  380,
  255,
  559,
  1254,
  959,
  1251,
  313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [57,
  593,
  108,
  1458,
  47,
  1696,
  1675,
  120,
  1531,
  170,
  1000,
  553,
  1581,
  1158,
  1553,
  168,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [58,
  1567,
  1112,
  1219,
  1051,
  1154,
  883,
  488,
  567,
  1695,
  912,
  1025,
  850,
  301,
  1232,
  1162,
  993,
  272,
  575,
  246,
  1125,
  1331,
  815,
  1002,
  851,
  1135,
  154,
  580,
  398,
  1597,
  203,
  1601,
  380,
  255,
  559,
  1254,
  959,
  1251,
  313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [60,
  1462,
  451,
  1555,
  1536,
  747,
  1290,
  1411,
  1042,
  444,
  916,
  392,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [61,
  1042,
  444,
  916,
  392,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [62,
  1077,
  109,
  539,
  1589,
  328,
  309,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [63,
  7,
  382,
  1490,
  756,
  67,
  1643,
  437,
  692,
  252,
  280,
  281,
  1279,
  844,
  461,
  393,
  1657,
  686,
  478,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [64,
  1419,
  449,
  1664,
  387,
  1669,
  1507,
  492,
  1394,
  537,
  1002,
  851,
  1135,
  154,
  580,
  398,
  1597,
  203,
  1601,
  380,
  255,
  559,
  1254,
  959,
  1251,
  313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [65,
  1527,
  947,
  1357,
  224,
  990,
  148,
  661,
  1481,
  1488,
  1120,
  516,
  1157,
  1054,
  642,
  578,
  652,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [66, 166, 566, 1533, 336, 778, 560, 1461, 464, 152, 1516, 963, 785],
 [67,
  1643,
  437,
  692,
  252,
  280,
  281,
  1279,
  844,
  461,
  393,
  1657,
  686,
  478,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [68,
  1225,
  964,
  87,
  782,
  648,
  935,
  1217,
  578,
  652,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [69,
  1177,
  995,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [70,
  1035,
  494,
  600,
  1017,
  1305,
  1348,
  864,
  1386,
  501,
  522,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [71,
  705,
  1085,
  1437,
  1175,
  74,
  477,
  765,
  1459,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [72,
  467,
  294,
  153,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [73,
  1163,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [74,
  477,
  765,
  1459,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [75,
  100,
  527,
  993,
  272,
  575,
  246,
  1125,
  1331,
  815,
  1002,
  851,
  1135,
  154,
  580,
  398,
  1597,
  203,
  1601,
  380,
  255,
  559,
  1254,
  959,
  1251,
  313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [76,
  480,
  875,
  1663,
  1011,
  1165,
  1313,
  184,
  640,
  139,
  1624,
  140,
  837,
  557,
  977,
  1402,
  783,
  1296,
  739,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [77,
  470,
  1537,
  907,
  73,
  1163,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [78,
  510,
  544,
  1253,
  1257,
  1415,
  1266,
  125,
  427,
  1381,
  509,
  1642,
  932,
  623,
  29,
  1036,
  551,
  970,
  198,
  1653,
  1131,
  306,
  1039,
  1325,
  1416,
  1459,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [79,
  711,
  312,
  338,
  716,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [80,
  1473,
  463,
  1277,
  1397,
  751,
  1705,
  1221,
  299,
  1386,
  501,
  522,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [81,
  1106,
  650,
  538,
  1098,
  103,
  468,
  1467,
  1585,
  1141,
  1159,
  20,
  418,
  933,
  1671,
  102,
  1630,
  299,
  1386,
  501,
  522,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [82, 1318, 415, 1516, 963, 785],
 [83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [84,
  1364,
  231,
  42,
  484,
  1592,
  1365,
  1603,
  372,
  452,
  332,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [85,
  177,
  1250,
  1157,
  1054,
  642,
  578,
  652,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [86,
  749,
  960,
  1290,
  1411,
  1042,
  444,
  916,
  392,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [87,
  782,
  648,
  935,
  1217,
  578,
  652,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [88,
  506,
  826,
  893,
  1261,
  1345,
  1026,
  523,
  1388,
  1197,
  1193,
  1454,
  1472,
  457,
  163,
  1592,
  1365,
  1603,
  372,
  452,
  332,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [89,
  335,
  1199,
  613,
  941,
  750,
  900,
  1496,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [90,
  1473,
  463,
  1277,
  1397,
  751,
  1705,
  1221,
  299,
  1386,
  501,
  522,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [91, 998, 770, 157, 354, 997, 1590, 136, 963, 785],
 [92, 1008, 412, 1656, 37, 334, 718, 978, 1479, 82, 1318, 415, 1516, 963, 785],
 [93,
  871,
  577,
  1349,
  657,
  754,
  1512,
  303,
  1479,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [94,
  1155,
  814,
  1248,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [95,
  205,
  38,
  395,
  267,
  174,
  1449,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [96,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [97,
  1377,
  95,
  205,
  38,
  395,
  267,
  174,
  1449,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [99, 560, 1461, 464, 152, 1516, 963, 785],
 [100,
  527,
  993,
  272,
  575,
  246,
  1125,
  1331,
  815,
  1002,
  851,
  1135,
  154,
  580,
  398,
  1597,
  203,
  1601,
  380,
  255,
  559,
  1254,
  959,
  1251,
  313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [101,
  277,
  1045,
  1209,
  475,
  1668,
  570,
  980,
  1472,
  457,
  163,
  1592,
  1365,
  1603,
  372,
  452,
  332,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [102,
  1630,
  299,
  1386,
  501,
  522,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [103,
  468,
  1467,
  1585,
  1141,
  1159,
  20,
  418,
  933,
  1671,
  102,
  1630,
  299,
  1386,
  501,
  522,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [104,
  1555,
  1536,
  747,
  1290,
  1411,
  1042,
  444,
  916,
  392,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [105,
  1655,
  1284,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [106,
  832,
  989,
  1557,
  266,
  469,
  644,
  1268,
  5,
  53,
  560,
  1461,
  464,
  152,
  1516,
  963,
  785],
 [107,
  1093,
  624,
  1435,
  573,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [108,
  1458,
  47,
  1696,
  1675,
  120,
  1531,
  170,
  1000,
  553,
  1581,
  1158,
  1553,
  168,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [109,
  539,
  1589,
  328,
  309,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [110,
  1613,
  1311,
  1117,
  1485,
  805,
  951,
  1337,
  602,
  232,
  1448,
  70,
  1035,
  494,
  600,
  1017,
  1305,
  1348,
  864,
  1386,
  501,
  522,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [111,
  567,
  1695,
  912,
  1025,
  850,
  301,
  1232,
  1162,
  993,
  272,
  575,
  246,
  1125,
  1331,
  815,
  1002,
  851,
  1135,
  154,
  580,
  398,
  1597,
  203,
  1601,
  380,
  255,
  559,
  1254,
  959,
  1251,
  313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [112,
  1116,
  795,
  1342,
  1072,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [113,
  1010,
  66,
  166,
  566,
  1533,
  336,
  778,
  560,
  1461,
  464,
  152,
  1516,
  963,
  785],
 [114,
  432,
  314,
  221,
  594,
  1309,
  755,
  1293,
  628,
  1690,
  1281,
  477,
  765,
  1459,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [115, 327, 1502, 1316, 316, 1138, 1314, 157, 354, 997, 1590, 136, 963, 785],
 [116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [117, 666, 1223, 1445, 325, 215, 1626, 1479, 82, 1318, 415, 1516, 963, 785],
 [118, 469, 644, 1268, 5, 53, 560, 1461, 464, 152, 1516, 963, 785],
 [119,
  305,
  390,
  1061,
  206,
  403,
  1575,
  1594,
  876,
  962,
  1190,
  1341,
  273,
  581,
  1606,
  132,
  1170,
  104,
  1555,
  1536,
  747,
  1290,
  1411,
  1042,
  444,
  916,
  392,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [120,
  1531,
  170,
  1000,
  553,
  1581,
  1158,
  1553,
  168,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [121,
  296,
  753,
  1024,
  919,
  695,
  1570,
  887,
  866,
  19,
  177,
  1250,
  1157,
  1054,
  642,
  578,
  652,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [123,
  498,
  1589,
  328,
  309,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [124,
  358,
  521,
  488,
  567,
  1695,
  912,
  1025,
  850,
  301,
  1232,
  1162,
  993,
  272,
  575,
  246,
  1125,
  1331,
  815,
  1002,
  851,
  1135,
  154,
  580,
  398,
  1597,
  203,
  1601,
  380,
  255,
  559,
  1254,
  959,
  1251,
  313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [125,
  427,
  1381,
  509,
  1642,
  932,
  623,
  29,
  1036,
  551,
  970,
  198,
  1653,
  1131,
  306,
  1039,
  1325,
  1416,
  1459,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [126, 660, 196, 12, 460, 276, 645, 99, 560, 1461, 464, 152, 1516, 963, 785],
 [127,
  458,
  435,
  109,
  539,
  1589,
  328,
  309,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [129,
  1063,
  1280,
  81,
  1106,
  650,
  538,
  1098,
  103,
  468,
  1467,
  1585,
  1141,
  1159,
  20,
  418,
  933,
  1671,
  102,
  1630,
  299,
  1386,
  501,
  522,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [130,
  1477,
  817,
  423,
  311,
  1409,
  1704,
  743,
  1403,
  289,
  733,
  854,
  1129,
  443,
  750,
  900,
  1496,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [131,
  1172,
  898,
  1417,
  1398,
  921,
  71,
  705,
  1085,
  1437,
  1175,
  74,
  477,
  765,
  1459,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [132,
  1170,
  104,
  1555,
  1536,
  747,
  1290,
  1411,
  1042,
  444,
  916,
  392,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [133,
  369,
  1019,
  238,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [134,
  765,
  1459,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [135,
  3,
  1022,
  403,
  1575,
  1594,
  876,
  962,
  1190,
  1341,
  273,
  581,
  1606,
  132,
  1170,
  104,
  1555,
  1536,
  747,
  1290,
  1411,
  1042,
  444,
  916,
  392,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [136, 963, 785],
 [137,
  69,
  1177,
  995,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [138, 906, 908, 968, 892, 880, 354, 997, 1590, 136, 963, 785],
 [139,
  1624,
  140,
  837,
  557,
  977,
  1402,
  783,
  1296,
  739,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [140, 837, 557, 977, 1402, 783, 1296, 739, 82, 1318, 415, 1516, 963, 785],
 [141,
  894,
  322,
  14,
  595,
  1337,
  602,
  232,
  1448,
  70,
  1035,
  494,
  600,
  1017,
  1305,
  1348,
  864,
  1386,
  501,
  522,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [142,
  1471,
  13,
  627,
  1309,
  755,
  1293,
  628,
  1690,
  1281,
  477,
  765,
  1459,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [143,
  6,
  579,
  1037,
  818,
  859,
  1154,
  883,
  488,
  567,
  1695,
  912,
  1025,
  850,
  301,
  1232,
  1162,
  993,
  272,
  575,
  246,
  1125,
  1331,
  815,
  1002,
  851,
  1135,
  154,
  580,
  398,
  1597,
  203,
  1601,
  380,
  255,
  559,
  1254,
  959,
  1251,
  313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [144,
  214,
  11,
  416,
  1648,
  1115,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [145,
  539,
  1589,
  328,
  309,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [147,
  1163,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [148,
  661,
  1481,
  1488,
  1120,
  516,
  1157,
  1054,
  642,
  578,
  652,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [149,
  714,
  1358,
  312,
  338,
  716,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [150,
  61,
  1042,
  444,
  916,
  392,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [151,
  1489,
  1078,
  735,
  1667,
  229,
  280,
  281,
  1279,
  844,
  461,
  393,
  1657,
  686,
  478,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [152, 1516, 963, 785],
 [153,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [154,
  580,
  398,
  1597,
  203,
  1601,
  380,
  255,
  559,
  1254,
  959,
  1251,
  313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [156,
  1439,
  130,
  1477,
  817,
  423,
  311,
  1409,
  1704,
  743,
  1403,
  289,
  733,
  854,
  1129,
  443,
  750,
  900,
  1496,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [157, 354, 997, 1590, 136, 963, 785],
 [158, 690, 638, 1060, 1608, 1534, 785],
 [159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [160, 619, 316, 1138, 1314, 157, 354, 997, 1590, 136, 963, 785],
 [161,
  1430,
  1202,
  411,
  1126,
  57,
  593,
  108,
  1458,
  47,
  1696,
  1675,
  120,
  1531,
  170,
  1000,
  553,
  1581,
  1158,
  1553,
  168,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [162,
  308,
  841,
  543,
  1501,
  873,
  1011,
  1165,
  1313,
  184,
  640,
  139,
  1624,
  140,
  837,
  557,
  977,
  1402,
  783,
  1296,
  739,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [163,
  1592,
  1365,
  1603,
  372,
  452,
  332,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [164, 254, 1428, 1468, 1479, 82, 1318, 415, 1516, 963, 785],
 [165,
  670,
  288,
  25,
  323,
  434,
  691,
  1329,
  827,
  1107,
  147,
  1163,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [166, 566, 1533, 336, 778, 560, 1461, 464, 152, 1516, 963, 785],
 [167,
  23,
  142,
  1471,
  13,
  627,
  1309,
  755,
  1293,
  628,
  1690,
  1281,
  477,
  765,
  1459,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [168, 908, 968, 892, 880, 354, 997, 1590, 136, 963, 785],
 [169,
  1271,
  959,
  1251,
  313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [170,
  1000,
  553,
  1581,
  1158,
  1553,
  168,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [171,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [172,
  1165,
  1313,
  184,
  640,
  139,
  1624,
  140,
  837,
  557,
  977,
  1402,
  783,
  1296,
  739,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [173,
  374,
  239,
  1050,
  247,
  1027,
  326,
  246,
  1125,
  1331,
  815,
  1002,
  851,
  1135,
  154,
  580,
  398,
  1597,
  203,
  1601,
  380,
  255,
  559,
  1254,
  959,
  1251,
  313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [174,
  1449,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [175, 796, 1553, 168, 908, 968, 892, 880, 354, 997, 1590, 136, 963, 785],
 [176,
  847,
  184,
  640,
  139,
  1624,
  140,
  837,
  557,
  977,
  1402,
  783,
  1296,
  739,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [177,
  1250,
  1157,
  1054,
  642,
  578,
  652,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [178,
  1258,
  1189,
  1119,
  449,
  1664,
  387,
  1669,
  1507,
  492,
  1394,
  537,
  1002,
  851,
  1135,
  154,
  580,
  398,
  1597,
  203,
  1601,
  380,
  255,
  559,
  1254,
  959,
  1251,
  313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [179,
  1326,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [180, 1616, 1578, 215, 1626, 1479, 82, 1318, 415, 1516, 963, 785],
 [181,
  1284,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [182,
  1321,
  1375,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [183,
  217,
  1108,
  836,
  1307,
  304,
  1490,
  756,
  67,
  1643,
  437,
  692,
  252,
  280,
  281,
  1279,
  844,
  461,
  393,
  1657,
  686,
  478,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [184,
  640,
  139,
  1624,
  140,
  837,
  557,
  977,
  1402,
  783,
  1296,
  739,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [185,
  1015,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [186,
  1541,
  1460,
  480,
  875,
  1663,
  1011,
  1165,
  1313,
  184,
  640,
  139,
  1624,
  140,
  837,
  557,
  977,
  1402,
  783,
  1296,
  739,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [187,
  1064,
  807,
  1520,
  1145,
  572,
  169,
  1271,
  959,
  1251,
  313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [188,
  1499,
  1484,
  370,
  450,
  1636,
  242,
  662,
  393,
  1657,
  686,
  478,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [189,
  1408,
  819,
  582,
  66,
  166,
  566,
  1533,
  336,
  778,
  560,
  1461,
  464,
  152,
  1516,
  963,
  785],
 [190,
  746,
  1557,
  266,
  469,
  644,
  1268,
  5,
  53,
  560,
  1461,
  464,
  152,
  1516,
  963,
  785],
 [191,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [192,
  1677,
  30,
  926,
  111,
  567,
  1695,
  912,
  1025,
  850,
  301,
  1232,
  1162,
  993,
  272,
  575,
  246,
  1125,
  1331,
  815,
  1002,
  851,
  1135,
  154,
  580,
  398,
  1597,
  203,
  1601,
  380,
  255,
  559,
  1254,
  959,
  1251,
  313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [193,
  1256,
  361,
  660,
  196,
  12,
  460,
  276,
  645,
  99,
  560,
  1461,
  464,
  152,
  1516,
  963,
  785],
 [194, 0, 388, 91, 998, 770, 157, 354, 997, 1590, 136, 963, 785],
 [195,
  1383,
  598,
  969,
  1605,
  1249,
  625,
  15,
  1040,
  958,
  1583,
  1241,
  712,
  526,
  590,
  941,
  750,
  900,
  1496,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [196, 12, 460, 276, 645, 99, 560, 1461, 464, 152, 1516, 963, 785],
 [197,
  779,
  686,
  478,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [198,
  1653,
  1131,
  306,
  1039,
  1325,
  1416,
  1459,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [199,
  1334,
  1396,
  61,
  1042,
  444,
  916,
  392,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [200,
  1140,
  1673,
  775,
  293,
  1688,
  659,
  1049,
  471,
  956,
  1483,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [201,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [202, 977, 1402, 783, 1296, 739, 82, 1318, 415, 1516, 963, 785],
 [203,
  1601,
  380,
  255,
  559,
  1254,
  959,
  1251,
  313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [204,
  1631,
  587,
  440,
  1176,
  606,
  985,
  710,
  1597,
  203,
  1601,
  380,
  255,
  559,
  1254,
  959,
  1251,
  313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [205,
  38,
  395,
  267,
  174,
  1449,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [206,
  403,
  1575,
  1594,
  876,
  962,
  1190,
  1341,
  273,
  581,
  1606,
  132,
  1170,
  104,
  1555,
  1536,
  747,
  1290,
  1411,
  1042,
  444,
  916,
  392,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [207,
  1525,
  1505,
  852,
  225,
  1584,
  1500,
  5,
  53,
  560,
  1461,
  464,
  152,
  1516,
  963,
  785],
 [208,
  915,
  127,
  458,
  435,
  109,
  539,
  1589,
  328,
  309,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [209,
  94,
  1155,
  814,
  1248,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [210,
  444,
  916,
  392,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [211,
  181,
  1284,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [212, 138, 906, 908, 968, 892, 880, 354, 997, 1590, 136, 963, 785],
 [213,
  321,
  45,
  222,
  1425,
  197,
  779,
  686,
  478,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [214,
  11,
  416,
  1648,
  1115,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [215, 1626, 1479, 82, 1318, 415, 1516, 963, 785],
 [216,
  789,
  816,
  402,
  1441,
  1332,
  971,
  1244,
  1007,
  22,
  1191,
  1641,
  663,
  1430,
  1202,
  411,
  1126,
  57,
  593,
  108,
  1458,
  47,
  1696,
  1675,
  120,
  1531,
  170,
  1000,
  553,
  1581,
  1158,
  1553,
  168,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [217,
  1108,
  836,
  1307,
  304,
  1490,
  756,
  67,
  1643,
  437,
  692,
  252,
  280,
  281,
  1279,
  844,
  461,
  393,
  1657,
  686,
  478,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [218,
  1541,
  1460,
  480,
  875,
  1663,
  1011,
  1165,
  1313,
  184,
  640,
  139,
  1624,
  140,
  837,
  557,
  977,
  1402,
  783,
  1296,
  739,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [219,
  1522,
  582,
  66,
  166,
  566,
  1533,
  336,
  778,
  560,
  1461,
  464,
  152,
  1516,
  963,
  785],
 [220,
  524,
  830,
  221,
  594,
  1309,
  755,
  1293,
  628,
  1690,
  1281,
  477,
  765,
  1459,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [221,
  594,
  1309,
  755,
  1293,
  628,
  1690,
  1281,
  477,
  765,
  1459,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [222,
  1425,
  197,
  779,
  686,
  478,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [223,
  1398,
  921,
  71,
  705,
  1085,
  1437,
  1175,
  74,
  477,
  765,
  1459,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [224,
  990,
  148,
  661,
  1481,
  1488,
  1120,
  516,
  1157,
  1054,
  642,
  578,
  652,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [225, 1584, 1500, 5, 53, 560, 1461, 464, 152, 1516, 963, 785],
 [226,
  490,
  671,
  253,
  657,
  754,
  1512,
  303,
  1479,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [227,
  714,
  1358,
  312,
  338,
  716,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [228,
  1171,
  1503,
  1338,
  1694,
  602,
  232,
  1448,
  70,
  1035,
  494,
  600,
  1017,
  1305,
  1348,
  864,
  1386,
  501,
  522,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [229,
  280,
  281,
  1279,
  844,
  461,
  393,
  1657,
  686,
  478,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [230,
  178,
  1258,
  1189,
  1119,
  449,
  1664,
  387,
  1669,
  1507,
  492,
  1394,
  537,
  1002,
  851,
  1135,
  154,
  580,
  398,
  1597,
  203,
  1601,
  380,
  255,
  559,
  1254,
  959,
  1251,
  313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [231,
  42,
  484,
  1592,
  1365,
  1603,
  372,
  452,
  332,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [232,
  1448,
  70,
  1035,
  494,
  600,
  1017,
  1305,
  1348,
  864,
  1386,
  501,
  522,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [233,
  1696,
  1675,
  120,
  1531,
  170,
  1000,
  553,
  1581,
  1158,
  1553,
  168,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [234,
  1298,
  966,
  1620,
  278,
  105,
  1655,
  1284,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [235,
  502,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [236,
  843,
  658,
  1276,
  404,
  1542,
  717,
  520,
  47,
  1696,
  1675,
  120,
  1531,
  170,
  1000,
  553,
  1581,
  1158,
  1553,
  168,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [237,
  1010,
  66,
  166,
  566,
  1533,
  336,
  778,
  560,
  1461,
  464,
  152,
  1516,
  963,
  785],
 [238,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [239,
  1050,
  247,
  1027,
  326,
  246,
  1125,
  1331,
  815,
  1002,
  851,
  1135,
  154,
  580,
  398,
  1597,
  203,
  1601,
  380,
  255,
  559,
  1254,
  959,
  1251,
  313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [240,
  1344,
  840,
  838,
  165,
  670,
  288,
  25,
  323,
  434,
  691,
  1329,
  827,
  1107,
  147,
  1163,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [241,
  1503,
  1338,
  1694,
  602,
  232,
  1448,
  70,
  1035,
  494,
  600,
  1017,
  1305,
  1348,
  864,
  1386,
  501,
  522,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [242,
  662,
  393,
  1657,
  686,
  478,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [243,
  814,
  1248,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [245,
  1189,
  1119,
  449,
  1664,
  387,
  1669,
  1507,
  492,
  1394,
  537,
  1002,
  851,
  1135,
  154,
  580,
  398,
  1597,
  203,
  1601,
  380,
  255,
  559,
  1254,
  959,
  1251,
  313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [246,
  1125,
  1331,
  815,
  1002,
  851,
  1135,
  154,
  580,
  398,
  1597,
  203,
  1601,
  380,
  255,
  559,
  1254,
  959,
  1251,
  313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [247,
  1027,
  326,
  246,
  1125,
  1331,
  815,
  1002,
  851,
  1135,
  154,
  580,
  398,
  1597,
  203,
  1601,
  380,
  255,
  559,
  1254,
  959,
  1251,
  313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [248,
  525,
  1298,
  966,
  1620,
  278,
  105,
  1655,
  1284,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [249,
  505,
  1347,
  589,
  1080,
  288,
  25,
  323,
  434,
  691,
  1329,
  827,
  1107,
  147,
  1163,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [250,
  450,
  1636,
  242,
  662,
  393,
  1657,
  686,
  478,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [251,
  537,
  1002,
  851,
  1135,
  154,
  580,
  398,
  1597,
  203,
  1601,
  380,
  255,
  559,
  1254,
  959,
  1251,
  313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [252,
  280,
  281,
  1279,
  844,
  461,
  393,
  1657,
  686,
  478,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [253, 657, 754, 1512, 303, 1479, 82, 1318, 415, 1516, 963, 785],
 [254, 1428, 1468, 1479, 82, 1318, 415, 1516, 963, 785],
 [255,
  559,
  1254,
  959,
  1251,
  313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [256,
  1540,
  832,
  989,
  1557,
  266,
  469,
  644,
  1268,
  5,
  53,
  560,
  1461,
  464,
  152,
  1516,
  963,
  785],
 [257,
  1013,
  523,
  1388,
  1197,
  1193,
  1454,
  1472,
  457,
  163,
  1592,
  1365,
  1603,
  372,
  452,
  332,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [258,
  1125,
  1331,
  815,
  1002,
  851,
  1135,
  154,
  580,
  398,
  1597,
  203,
  1601,
  380,
  255,
  559,
  1254,
  959,
  1251,
  313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [259,
  1497,
  1700,
  134,
  765,
  1459,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [260,
  1205,
  1427,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [261,
  505,
  1347,
  589,
  1080,
  288,
  25,
  323,
  434,
  691,
  1329,
  827,
  1107,
  147,
  1163,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [262,
  940,
  1683,
  1104,
  1462,
  451,
  1555,
  1536,
  747,
  1290,
  1411,
  1042,
  444,
  916,
  392,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [263,
  712,
  526,
  590,
  941,
  750,
  900,
  1496,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [264,
  1653,
  1131,
  306,
  1039,
  1325,
  1416,
  1459,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [265, 674, 949, 715, 1587, 1353, 1468, 1479, 82, 1318, 415, 1516, 963, 785],
 [266, 469, 644, 1268, 5, 53, 560, 1461, 464, 152, 1516, 963, 785],
 [267,
  174,
  1449,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [269,
  169,
  1271,
  959,
  1251,
  313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [270,
  683,
  1265,
  1226,
  673,
  478,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [271,
  312,
  338,
  716,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [272,
  575,
  246,
  1125,
  1331,
  815,
  1002,
  851,
  1135,
  154,
  580,
  398,
  1597,
  203,
  1601,
  380,
  255,
  559,
  1254,
  959,
  1251,
  313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [273,
  581,
  1606,
  132,
  1170,
  104,
  1555,
  1536,
  747,
  1290,
  1411,
  1042,
  444,
  916,
  392,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [274,
  1226,
  673,
  478,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [275,
  1191,
  1641,
  663,
  1430,
  1202,
  411,
  1126,
  57,
  593,
  108,
  1458,
  47,
  1696,
  1675,
  120,
  1531,
  170,
  1000,
  553,
  1581,
  1158,
  1553,
  168,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [276, 645, 99, 560, 1461, 464, 152, 1516, 963, 785],
 [277,
  1045,
  1209,
  475,
  1668,
  570,
  980,
  1472,
  457,
  163,
  1592,
  1365,
  1603,
  372,
  452,
  332,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [278,
  105,
  1655,
  1284,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [279,
  260,
  1205,
  1427,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [280,
  281,
  1279,
  844,
  461,
  393,
  1657,
  686,
  478,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [281,
  1279,
  844,
  461,
  393,
  1657,
  686,
  478,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [282,
  1467,
  1585,
  1141,
  1159,
  20,
  418,
  933,
  1671,
  102,
  1630,
  299,
  1386,
  501,
  522,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [283,
  972,
  1355,
  1062,
  131,
  1172,
  898,
  1417,
  1398,
  921,
  71,
  705,
  1085,
  1437,
  1175,
  74,
  477,
  765,
  1459,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [284,
  461,
  393,
  1657,
  686,
  478,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [285,
  1591,
  371,
  142,
  1471,
  13,
  627,
  1309,
  755,
  1293,
  628,
  1690,
  1281,
  477,
  765,
  1459,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [286,
  591,
  926,
  111,
  567,
  1695,
  912,
  1025,
  850,
  301,
  1232,
  1162,
  993,
  272,
  575,
  246,
  1125,
  1331,
  815,
  1002,
  851,
  1135,
  154,
  580,
  398,
  1597,
  203,
  1601,
  380,
  255,
  559,
  1254,
  959,
  1251,
  313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [287,
  1059,
  789,
  816,
  402,
  1441,
  1332,
  971,
  1244,
  1007,
  22,
  1191,
  1641,
  663,
  1430,
  1202,
  411,
  1126,
  57,
  593,
  108,
  1458,
  47,
  1696,
  1675,
  120,
  1531,
  170,
  1000,
  553,
  1581,
  1158,
  1553,
  168,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [288,
  25,
  323,
  434,
  691,
  1329,
  827,
  1107,
  147,
  1163,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [289,
  733,
  854,
  1129,
  443,
  750,
  900,
  1496,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [290,
  1222,
  1371,
  781,
  499,
  865,
  883,
  488,
  567,
  1695,
  912,
  1025,
  850,
  301,
  1232,
  1162,
  993,
  272,
  575,
  246,
  1125,
  1331,
  815,
  1002,
  851,
  1135,
  154,
  580,
  398,
  1597,
  203,
  1601,
  380,
  255,
  559,
  1254,
  959,
  1251,
  313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [291,
  1565,
  379,
  1447,
  1009,
  1699,
  264,
  1653,
  1131,
  306,
  1039,
  1325,
  1416,
  1459,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [292,
  1466,
  263,
  712,
  526,
  590,
  941,
  750,
  900,
  1496,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [293,
  1688,
  659,
  1049,
  471,
  956,
  1483,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [294,
  153,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [295,
  310,
  386,
  845,
  279,
  260,
  1205,
  1427,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [296,
  753,
  1024,
  919,
  695,
  1570,
  887,
  866,
  19,
  177,
  1250,
  1157,
  1054,
  642,
  578,
  652,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [297,
  554,
  999,
  1574,
  1672,
  794,
  552,
  292,
  1466,
  263,
  712,
  526,
  590,
  941,
  750,
  900,
  1496,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [298,
  1272,
  1302,
  1252,
  1148,
  1390,
  1588,
  659,
  1049,
  471,
  956,
  1483,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [299, 1386, 501, 522, 968, 892, 880, 354, 997, 1590, 136, 963, 785],
 [300,
  519,
  732,
  149,
  714,
  1358,
  312,
  338,
  716,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [301,
  1232,
  1162,
  993,
  272,
  575,
  246,
  1125,
  1331,
  815,
  1002,
  851,
  1135,
  154,
  580,
  398,
  1597,
  203,
  1601,
  380,
  255,
  559,
  1254,
  959,
  1251,
  313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [302,
  89,
  335,
  1199,
  613,
  941,
  750,
  900,
  1496,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [303, 1479, 82, 1318, 415, 1516, 963, 785],
 [304,
  1490,
  756,
  67,
  1643,
  437,
  692,
  252,
  280,
  281,
  1279,
  844,
  461,
  393,
  1657,
  686,
  478,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [305,
  390,
  1061,
  206,
  403,
  1575,
  1594,
  876,
  962,
  1190,
  1341,
  273,
  581,
  1606,
  132,
  1170,
  104,
  1555,
  1536,
  747,
  1290,
  1411,
  1042,
  444,
  916,
  392,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [306,
  1039,
  1325,
  1416,
  1459,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [307,
  201,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [308,
  841,
  543,
  1501,
  873,
  1011,
  1165,
  1313,
  184,
  640,
  139,
  1624,
  140,
  837,
  557,
  977,
  1402,
  783,
  1296,
  739,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [309,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [310,
  386,
  845,
  279,
  260,
  1205,
  1427,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [311,
  1409,
  1704,
  743,
  1403,
  289,
  733,
  854,
  1129,
  443,
  750,
  900,
  1496,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [312,
  338,
  716,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [314,
  221,
  594,
  1309,
  755,
  1293,
  628,
  1690,
  1281,
  477,
  765,
  1459,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [315,
  1478,
  1594,
  876,
  962,
  1190,
  1341,
  273,
  581,
  1606,
  132,
  1170,
  104,
  1555,
  1536,
  747,
  1290,
  1411,
  1042,
  444,
  916,
  392,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [316, 1138, 1314, 157, 354, 997, 1590, 136, 963, 785],
 [317,
  773,
  890,
  1610,
  614,
  1623,
  189,
  1408,
  819,
  582,
  66,
  166,
  566,
  1533,
  336,
  778,
  560,
  1461,
  464,
  152,
  1516,
  963,
  785],
 [318,
  615,
  1046,
  671,
  253,
  657,
  754,
  1512,
  303,
  1479,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [319,
  930,
  1230,
  295,
  310,
  386,
  845,
  279,
  260,
  1205,
  1427,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [320,
  181,
  1284,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [321,
  45,
  222,
  1425,
  197,
  779,
  686,
  478,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [322,
  14,
  595,
  1337,
  602,
  232,
  1448,
  70,
  1035,
  494,
  600,
  1017,
  1305,
  1348,
  864,
  1386,
  501,
  522,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [323,
  434,
  691,
  1329,
  827,
  1107,
  147,
  1163,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [325, 215, 1626, 1479, 82, 1318, 415, 1516, 963, 785],
 [326,
  246,
  1125,
  1331,
  815,
  1002,
  851,
  1135,
  154,
  580,
  398,
  1597,
  203,
  1601,
  380,
  255,
  559,
  1254,
  959,
  1251,
  313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [327, 1502, 1316, 316, 1138, 1314, 157, 354, 997, 1590, 136, 963, 785],
 [328,
  309,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [329,
  589,
  1080,
  288,
  25,
  323,
  434,
  691,
  1329,
  827,
  1107,
  147,
  1163,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [330,
  504,
  223,
  1398,
  921,
  71,
  705,
  1085,
  1437,
  1175,
  74,
  477,
  765,
  1459,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [331,
  1678,
  1463,
  1114,
  1650,
  1618,
  1353,
  1468,
  1479,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [332, 892, 880, 354, 997, 1590, 136, 963, 785],
 [333,
  346,
  1674,
  491,
  689,
  797,
  1220,
  546,
  1071,
  1,
  431,
  1116,
  795,
  1342,
  1072,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [334, 718, 978, 1479, 82, 1318, 415, 1516, 963, 785],
 [335,
  1199,
  613,
  941,
  750,
  900,
  1496,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [336, 778, 560, 1461, 464, 152, 1516, 963, 785],
 [337,
  804,
  1456,
  1165,
  1313,
  184,
  640,
  139,
  1624,
  140,
  837,
  557,
  977,
  1402,
  783,
  1296,
  739,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [338,
  716,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [339,
  1395,
  1051,
  1154,
  883,
  488,
  567,
  1695,
  912,
  1025,
  850,
  301,
  1232,
  1162,
  993,
  272,
  575,
  246,
  1125,
  1331,
  815,
  1002,
  851,
  1135,
  154,
  580,
  398,
  1597,
  203,
  1601,
  380,
  255,
  559,
  1254,
  959,
  1251,
  313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [341, 1179, 1372, 906, 908, 968, 892, 880, 354, 997, 1590, 136, 963, 785],
 [342,
  713,
  1122,
  1346,
  60,
  1462,
  451,
  1555,
  1536,
  747,
  1290,
  1411,
  1042,
  444,
  916,
  392,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [343,
  1139,
  870,
  357,
  737,
  1644,
  55,
  983,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [344,
  1425,
  197,
  779,
  686,
  478,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [345,
  1252,
  1148,
  1390,
  1588,
  659,
  1049,
  471,
  956,
  1483,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [346,
  1674,
  491,
  689,
  797,
  1220,
  546,
  1071,
  1,
  431,
  1116,
  795,
  1342,
  1072,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [347,
  300,
  519,
  732,
  149,
  714,
  1358,
  312,
  338,
  716,
  83,
  465,
  146,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [348,
  1560,
  925,
  1009,
  1699,
  264,
  1653,
  1131,
  306,
  1039,
  1325,
  1416,
  1459,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [349,
  409,
  15,
  1040,
  958,
  1583,
  1241,
  712,
  526,
  590,
  941,
  750,
  900,
  1496,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [350,
  247,
  1027,
  326,
  246,
  1125,
  1331,
  815,
  1002,
  851,
  1135,
  154,
  580,
  398,
  1597,
  203,
  1601,
  380,
  255,
  559,
  1254,
  959,
  1251,
  313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [351,
  746,
  1557,
  266,
  469,
  644,
  1268,
  5,
  53,
  560,
  1461,
  464,
  152,
  1516,
  963,
  785],
 [352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [353,
  1434,
  585,
  46,
  688,
  1273,
  1668,
  570,
  980,
  1472,
  457,
  163,
  1592,
  1365,
  1603,
  372,
  452,
  332,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [354, 997, 1590, 136, 963, 785],
 [355,
  1482,
  1382,
  1605,
  1249,
  625,
  15,
  1040,
  958,
  1583,
  1241,
  712,
  526,
  590,
  941,
  750,
  900,
  1496,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [356,
  1215,
  1149,
  1439,
  130,
  1477,
  817,
  423,
  311,
  1409,
  1704,
  743,
  1403,
  289,
  733,
  854,
  1129,
  443,
  750,
  900,
  1496,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [357,
  737,
  1644,
  55,
  983,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [358,
  521,
  488,
  567,
  1695,
  912,
  1025,
  850,
  301,
  1232,
  1162,
  993,
  272,
  575,
  246,
  1125,
  1331,
  815,
  1002,
  851,
  1135,
  154,
  580,
  398,
  1597,
  203,
  1601,
  380,
  255,
  559,
  1254,
  959,
  1251,
  313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [359,
  107,
  1093,
  624,
  1435,
  573,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [360,
  1132,
  850,
  301,
  1232,
  1162,
  993,
  272,
  575,
  246,
  1125,
  1331,
  815,
  1002,
  851,
  1135,
  154,
  580,
  398,
  1597,
  203,
  1601,
  380,
  255,
  559,
  1254,
  959,
  1251,
  313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [361, 660, 196, 12, 460, 276, 645, 99, 560, 1461, 464, 152, 1516, 963, 785],
 [362,
  1239,
  248,
  525,
  1298,
  966,
  1620,
  278,
  105,
  1655,
  1284,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [363,
  144,
  214,
  11,
  416,
  1648,
  1115,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [364,
  831,
  1329,
  827,
  1107,
  147,
  1163,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [365,
  976,
  486,
  700,
  183,
  217,
  1108,
  836,
  1307,
  304,
  1490,
  756,
  67,
  1643,
  437,
  692,
  252,
  280,
  281,
  1279,
  844,
  461,
  393,
  1657,
  686,
  478,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [366, 1502, 1316, 316, 1138, 1314, 157, 354, 997, 1590, 136, 963, 785],
 [367,
  247,
  1027,
  326,
  246,
  1125,
  1331,
  815,
  1002,
  851,
  1135,
  154,
  580,
  398,
  1597,
  203,
  1601,
  380,
  255,
  559,
  1254,
  959,
  1251,
  313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [368,
  605,
  767,
  1151,
  903,
  97,
  1377,
  95,
  205,
  38,
  395,
  267,
  174,
  1449,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [369,
  1019,
  238,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [370,
  450,
  1636,
  242,
  662,
  393,
  1657,
  686,
  478,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [371,
  142,
  1471,
  13,
  627,
  1309,
  755,
  1293,
  628,
  1690,
  1281,
  477,
  765,
  1459,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [372, 452, 332, 892, 880, 354, 997, 1590, 136, 963, 785],
 [373,
  239,
  1050,
  247,
  1027,
  326,
  246,
  1125,
  1331,
  815,
  1002,
  851,
  1135,
  154,
  580,
  398,
  1597,
  203,
  1601,
  380,
  255,
  559,
  1254,
  959,
  1251,
  313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [374,
  239,
  1050,
  247,
  1027,
  326,
  246,
  1125,
  1331,
  815,
  1002,
  851,
  1135,
  154,
  580,
  398,
  1597,
  203,
  1601,
  380,
  255,
  559,
  1254,
  959,
  1251,
  313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [375,
  191,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [376,
  626,
  1546,
  1113,
  530,
  1,
  431,
  1116,
  795,
  1342,
  1072,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [377,
  1044,
  1467,
  1585,
  1141,
  1159,
  20,
  418,
  933,
  1671,
  102,
  1630,
  299,
  1386,
  501,
  522,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [378,
  549,
  825,
  222,
  1425,
  197,
  779,
  686,
  478,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [379,
  1447,
  1009,
  1699,
  264,
  1653,
  1131,
  306,
  1039,
  1325,
  1416,
  1459,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [380,
  255,
  559,
  1254,
  959,
  1251,
  313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [381,
  910,
  205,
  38,
  395,
  267,
  174,
  1449,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [382,
  1490,
  756,
  67,
  1643,
  437,
  692,
  252,
  280,
  281,
  1279,
  844,
  461,
  393,
  1657,
  686,
  478,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [383,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [384,
  726,
  1280,
  81,
  1106,
  650,
  538,
  1098,
  103,
  468,
  1467,
  1585,
  1141,
  1159,
  20,
  418,
  933,
  1671,
  102,
  1630,
  299,
  1386,
  501,
  522,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [385,
  1619,
  1020,
  433,
  337,
  804,
  1456,
  1165,
  1313,
  184,
  640,
  139,
  1624,
  140,
  837,
  557,
  977,
  1402,
  783,
  1296,
  739,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [386,
  845,
  279,
  260,
  1205,
  1427,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [387,
  1669,
  1507,
  492,
  1394,
  537,
  1002,
  851,
  1135,
  154,
  580,
  398,
  1597,
  203,
  1601,
  380,
  255,
  559,
  1254,
  959,
  1251,
  313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [388, 91, 998, 770, 157, 354, 997, 1590, 136, 963, 785],
 [389,
  725,
  489,
  1628,
  877,
  1384,
  473,
  563,
  318,
  615,
  1046,
  671,
  253,
  657,
  754,
  1512,
  303,
  1479,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [390,
  1061,
  206,
  403,
  1575,
  1594,
  876,
  962,
  1190,
  1341,
  273,
  581,
  1606,
  132,
  1170,
  104,
  1555,
  1536,
  747,
  1290,
  1411,
  1042,
  444,
  916,
  392,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [391,
  226,
  490,
  671,
  253,
  657,
  754,
  1512,
  303,
  1479,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [392,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [393,
  1657,
  686,
  478,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [394,
  746,
  1557,
  266,
  469,
  644,
  1268,
  5,
  53,
  560,
  1461,
  464,
  152,
  1516,
  963,
  785],
 [395,
  267,
  174,
  1449,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [396,
  696,
  1211,
  772,
  793,
  375,
  191,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [397,
  1041,
  881,
  1064,
  807,
  1520,
  1145,
  572,
  169,
  1271,
  959,
  1251,
  313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [398,
  1597,
  203,
  1601,
  380,
  255,
  559,
  1254,
  959,
  1251,
  313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [399,
  1243,
  1566,
  762,
  1235,
  1581,
  1158,
  1553,
  168,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [400, 666, 1223, 1445, 325, 215, 1626, 1479, 82, 1318, 415, 1516, 963, 785],
 [401,
  731,
  51,
  1344,
  840,
  838,
  165,
  670,
  288,
  25,
  323,
  434,
  691,
  1329,
  827,
  1107,
  147,
  1163,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [402,
  1441,
  1332,
  971,
  1244,
  1007,
  22,
  1191,
  1641,
  663,
  1430,
  1202,
  411,
  1126,
  57,
  593,
  108,
  1458,
  47,
  1696,
  1675,
  120,
  1531,
  170,
  1000,
  553,
  1581,
  1158,
  1553,
  168,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [403,
  1575,
  1594,
  876,
  962,
  1190,
  1341,
  273,
  581,
  1606,
  132,
  1170,
  104,
  1555,
  1536,
  747,
  1290,
  1411,
  1042,
  444,
  916,
  392,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [404,
  1542,
  717,
  520,
  47,
  1696,
  1675,
  120,
  1531,
  170,
  1000,
  553,
  1581,
  1158,
  1553,
  168,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [405,
  1691,
  365,
  976,
  486,
  700,
  183,
  217,
  1108,
  836,
  1307,
  304,
  1490,
  756,
  67,
  1643,
  437,
  692,
  252,
  280,
  281,
  1279,
  844,
  461,
  393,
  1657,
  686,
  478,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [406,
  1401,
  1388,
  1197,
  1193,
  1454,
  1472,
  457,
  163,
  1592,
  1365,
  1603,
  372,
  452,
  332,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [407,
  510,
  544,
  1253,
  1257,
  1415,
  1266,
  125,
  427,
  1381,
  509,
  1642,
  932,
  623,
  29,
  1036,
  551,
  970,
  198,
  1653,
  1131,
  306,
  1039,
  1325,
  1416,
  1459,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [408,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [409,
  15,
  1040,
  958,
  1583,
  1241,
  712,
  526,
  590,
  941,
  750,
  900,
  1496,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [410,
  429,
  85,
  177,
  1250,
  1157,
  1054,
  642,
  578,
  652,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [411,
  1126,
  57,
  593,
  108,
  1458,
  47,
  1696,
  1675,
  120,
  1531,
  170,
  1000,
  553,
  1581,
  1158,
  1553,
  168,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [412, 1656, 37, 334, 718, 978, 1479, 82, 1318, 415, 1516, 963, 785],
 [413,
  84,
  1364,
  231,
  42,
  484,
  1592,
  1365,
  1603,
  372,
  452,
  332,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [414,
  1644,
  55,
  983,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [415, 1516, 963, 785],
 [416,
  1648,
  1115,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [417,
  811,
  867,
  943,
  284,
  461,
  393,
  1657,
  686,
  478,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [418,
  933,
  1671,
  102,
  1630,
  299,
  1386,
  501,
  522,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [419,
  1045,
  1209,
  475,
  1668,
  570,
  980,
  1472,
  457,
  163,
  1592,
  1365,
  1603,
  372,
  452,
  332,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [420,
  331,
  1678,
  1463,
  1114,
  1650,
  1618,
  1353,
  1468,
  1479,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [421,
  114,
  432,
  314,
  221,
  594,
  1309,
  755,
  1293,
  628,
  1690,
  1281,
  477,
  765,
  1459,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [422,
  931,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [423,
  311,
  1409,
  1704,
  743,
  1403,
  289,
  733,
  854,
  1129,
  443,
  750,
  900,
  1496,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [424, 1100, 798, 452, 332, 892, 880, 354, 997, 1590, 136, 963, 785],
 [425,
  809,
  1547,
  11,
  416,
  1648,
  1115,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [426, 1618, 1353, 1468, 1479, 82, 1318, 415, 1516, 963, 785],
 [427,
  1381,
  509,
  1642,
  932,
  623,
  29,
  1036,
  551,
  970,
  198,
  1653,
  1131,
  306,
  1039,
  1325,
  1416,
  1459,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [428,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [429,
  85,
  177,
  1250,
  1157,
  1054,
  642,
  578,
  652,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [430,
  682,
  647,
  1306,
  584,
  574,
  1521,
  1300,
  80,
  1473,
  463,
  1277,
  1397,
  751,
  1705,
  1221,
  299,
  1386,
  501,
  522,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [431,
  1116,
  795,
  1342,
  1072,
  1552,
  803,
  1702,
  846,
  1554,
  1283,
  1018,
  1650,
  1618,
  1353,
  1468,
  1479,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [432,
  314,
  221,
  594,
  1309,
  755,
  1293,
  628,
  1690,
  1281,
  477,
  765,
  1459,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [433,
  337,
  804,
  1456,
  1165,
  1313,
  184,
  640,
  139,
  1624,
  140,
  837,
  557,
  977,
  1402,
  783,
  1296,
  739,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [434,
  691,
  1329,
  827,
  1107,
  147,
  1163,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [435,
  109,
  539,
  1589,
  328,
  309,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [436,
  495,
  1312,
  1003,
  1048,
  245,
  1189,
  1119,
  449,
  1664,
  387,
  1669,
  1507,
  492,
  1394,
  537,
  1002,
  851,
  1135,
  154,
  580,
  398,
  1597,
  203,
  1601,
  380,
  255,
  559,
  1254,
  959,
  1251,
  313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [437,
  692,
  252,
  280,
  281,
  1279,
  844,
  461,
  393,
  1657,
  686,
  478,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [438,
  297,
  554,
  999,
  1574,
  1672,
  794,
  552,
  292,
  1466,
  263,
  712,
  526,
  590,
  941,
  750,
  900,
  1496,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [439,
  1171,
  1503,
  1338,
  1694,
  602,
  232,
  1448,
  70,
  1035,
  494,
  600,
  1017,
  1305,
  1348,
  864,
  1386,
  501,
  522,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [440,
  1176,
  606,
  985,
  710,
  1597,
  203,
  1601,
  380,
  255,
  559,
  1254,
  959,
  1251,
  313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [441, 1303, 798, 452, 332, 892, 880, 354, 997, 1590, 136, 963, 785],
 [442,
  922,
  1586,
  965,
  635,
  261,
  505,
  1347,
  589,
  1080,
  288,
  25,
  323,
  434,
  691,
  1329,
  827,
  1107,
  147,
  1163,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [443, 750, 900, 1496, 968, 892, 880, 354, 997, 1590, 136, 963, 785],
 [444,
  916,
  392,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [445,
  502,
  952,
  1200,
  1693,
  1508,
  155,
  942,
  994,
  1654,
  1634,
  497,
  400,
  666,
  1223,
  1445,
  325,
  215,
  1626,
  1479,
  82,
  1318,
  415,
  1516,
  963,
  785],
 [446,
  515,
  1299,
  959,
  1251,
  313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [447,
  1151,
  903,
  97,
  1377,
  95,
  205,
  38,
  395,
  267,
  174,
  1449,
  453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [448,
  1497,
  1700,
  134,
  765,
  1459,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [449,
  1664,
  387,
  1669,
  1507,
  492,
  1394,
  537,
  1002,
  851,
  1135,
  154,
  580,
  398,
  1597,
  203,
  1601,
  380,
  255,
  559,
  1254,
  959,
  1251,
  313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [450,
  1636,
  242,
  662,
  393,
  1657,
  686,
  478,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [451,
  1555,
  1536,
  747,
  1290,
  1411,
  1042,
  444,
  916,
  392,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [452, 332, 892, 880, 354, 997, 1590, 136, 963, 785],
 [453,
  568,
  858,
  340,
  649,
  945,
  1087,
  634,
  822,
  1319,
  884,
  730,
  259,
  1497,
  1700,
  134,
  765,
  1459,
  1178,
  774,
  1068,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [454,
  790,
  1251,
  313,
  122,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [455,
  96,
  507,
  48,
  824,
  996,
  1089,
  607,
  116,
  1595,
  857,
  620,
  1212,
  128,
  159,
  760,
  1229,
  874,
  324,
  937,
  483,
  555,
  1400,
  1446,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [456,
  677,
  163,
  1592,
  1365,
  1603,
  372,
  452,
  332,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [457,
  163,
  1592,
  1365,
  1603,
  372,
  452,
  332,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [458,
  435,
  109,
  539,
  1589,
  328,
  309,
  493,
  1343,
  856,
  1491,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [459,
  315,
  1478,
  1594,
  876,
  962,
  1190,
  1341,
  273,
  581,
  1606,
  132,
  1170,
  104,
  1555,
  1536,
  747,
  1290,
  1411,
  1042,
  444,
  916,
  392,
  98,
  268,
  59,
  637,
  352,
  1498,
  597,
  1615,
  1340,
  244,
  1128,
  49,
  1146,
  1179,
  1372,
  906,
  908,
  968,
  892,
  880,
  354,
  997,
  1590,
  136,
  963,
  785],
 [460, 276, 645, 99, 560, 1461, 464, 152, 1516, 963, 785],
 ...]
In [16]:
# Construct Vocabulary, make sure BOS (<xxbos>) idx is 0
vocab_tok = [BOS] + [str(idx) for idx in np.arange(test_data.shape[0])]
# => ['xxbos', '0', '1', '2', '3', ..., str(test_data.shape[0]-1)]
#    (index 0 is the BOS token, NOT '<pad>' -- confirmed by Out[17] below)
vocab_ = {idx:tok for idx,tok in enumerate(vocab_tok)}
# Shift every cluster-path index by +1 (token '0' now sits at vocab index 1)
# and prepend index 0 so each sequence starts with the BOS token.
ids = [[0]+list(np.array(pth)+1) for pth in clust_paths]
# Sanity check: show two shifted sequences and decode the last id of the last path.
ids[150], ids[-1], vocab_[ids[-1][-1]]
Out[16]:
([0,
  869,
  1592,
  372,
  143,
  1472,
  14,
  628,
  1310,
  756,
  1294,
  629,
  1691,
  1282,
  478,
  766,
  1460,
  1179],
 [0,
  1704,
  1016,
  454,
  569,
  859,
  341,
  650,
  946,
  1088,
  635,
  823,
  1320,
  885,
  731,
  260,
  1498,
  1701,
  135,
  766,
  1460,
  1179,
  775,
  1069,
  761,
  1230,
  875,
  325,
  938,
  484,
  556,
  1401,
  1447,
  494,
  1344,
  857,
  1492,
  99,
  269,
  60,
  638,
  353,
  1499,
  598,
  1616],
 '1615')
In [17]:
# Sanity check both ends of the vocabulary: BOS at index 0, last data token at the tail.
vocab_tok[0], vocab_tok[-1], list(vocab_.items())[0], list(vocab_.items())[-1]
Out[17]:
('xxbos', '1705', (0, 'xxbos'), (1706, '1705'))
In [18]:
nh=2      # embedding size; matches the 2 columns of pretrained_weights (assumes test_data has 2 columns -- TODO confirm)
nt=128    # GRU hidden size (read as a global by Model_Density below)
bs=128    # batch size
bptt=40   # backprop-through-time sequence length
vocab=Vocab(vocab_)
# Prepend a [10., 10.] row as the embedding for the BOS token (index 0);
# rows 1..N are the original test_data points.
pretrained_weights = np.vstack(([10.0, 10.0], test_data))
nv = len(vocab.itos); nv  # vocabulary size (BOS + one token per data point)
Out[18]:
1707
In [20]:
class Model_Density(nn.Module):
    """2-layer GRU language model over cluster-path token sequences.

    The embedding table is frozen to ``pretrained_weights`` (row i is the
    coordinate vector of the i-th vocabulary token, row 0 is BOS), so the
    model only learns transitions between fixed data points.

    NOTE(review): ``nt``, ``pretrained_weights`` and ``BatchNorm1dFlat``
    are taken from notebook globals rather than passed in -- kept for
    backward compatibility with the existing cells.
    """
    def __init__(self, nv, nh, bs):
        super().__init__()
        self.nv = nv          # vocabulary size
        self.nh = nh          # embedding width (data dimensionality)
        self.nt = nt          # GRU hidden width (notebook-level global)
        self.bs = bs          # batch size used to shape the hidden state
        self.i_h = nn.Embedding(self.nv, self.nh)
        # Initialize and freeze the embeddings; pretrained_weights is a
        # numpy array of shape (vocab_size, nh) and pretrained_weights[i]
        # is the vector of the i-th vocabulary token.
        self.i_h.weight.data.copy_(torch.from_numpy(pretrained_weights))
        self.i_h.weight.requires_grad = False
        self.h_t = nn.Linear(self.nh, self.nt)
        self.rnn = nn.GRU(self.nt, self.nt, 2, batch_first=True)
        self.h_o = nn.Linear(self.nt, self.nv)
        self.bn = BatchNorm1dFlat(self.nt)
        self.reset()

    def reset(self):
        """Zero the GRU hidden state, shape (2 layers, bs, nt).

        Fix: allocate on the current parameter device instead of the
        original hardcoded ``.cuda()``, so CPU-only runs also work.
        """
        self.h = torch.zeros(2, self.bs, self.nt,
                             device=self.i_h.weight.device)

    def _step(self, emb):
        # Shared recurrence used by both forward paths: project to nt,
        # run the GRU, detach the hidden state (truncated BPTT across
        # batches), then decode to vocabulary logits.
        if self.h.device != emb.device:
            # The model may be moved to another device after reset().
            self.h = self.h.to(emb.device)
        res, h = self.rnn(self.h_t(emb), self.h)
        self.h = h.detach()
        return self.h_o(self.bn(res))

    def forward(self, x):
        """x: (bs, seq) token ids -> (bs, seq, nv) next-token logits."""
        return self._step(self.i_h(x))

    def forward_with_embedded(self, x):
        """Like forward() but x is already embedded: (bs, seq, nh)."""
        return self._step(x)
In [21]:
# Build a fastai language-model DataBunch from the BOS-prefixed id paths.
# Train and valid deliberately reuse the same sequences: the goal is for
# the model to memorize the path transitions, not to generalize.
ids = np.array(ids)
data = TextLMDataBunch.from_ids('/tmp', vocab=vocab,
                                train_ids=ids, valid_ids=ids,
                                bs=bs, bptt=bptt, device=device)
data
Out[21]:
TextLMDataBunch;

Train: LabelList (4931 items)
x: LMTextList
xxbos 13 627 1309 755 1293 628 1690 1281 477 765 1459 1178,xxbos 23 142 1471 13 627 1309 755 1293 628 1690 1281 477 765 1459 1178,xxbos 24 633 946 1703 1015 453 568 858 340 649 945 1087 634 822 1319 884 730 259 1497 1700 134 765 1459 1178,xxbos 25 323 434 691 1329 827 1107 147 1163 1178,xxbos 28 78 510 544 1253 1257 1415 1266 125 427 1381 509 1642 932 623 29 1036 551 970 198 1653 1131 306 1039 1325 1416 1459 1178
y: LMLabelList
,,,,
Path: /tmp;

Valid: LabelList (4931 items)
x: LMTextList
xxbos 13 627 1309 755 1293 628 1690 1281 477 765 1459 1178,xxbos 23 142 1471 13 627 1309 755 1293 628 1690 1281 477 765 1459 1178,xxbos 24 633 946 1703 1015 453 568 858 340 649 945 1087 634 822 1319 884 730 259 1497 1700 134 765 1459 1178,xxbos 25 323 434 691 1329 827 1107 147 1163 1178,xxbos 28 78 510 544 1253 1257 1415 1266 125 427 1381 509 1642 932 623 29 1036 551 970 198 1653 1131 306 1039 1325 1416 1459 1178
y: LMLabelList
,,,,
Path: /tmp;

Test: None
In [22]:
data.valid_ds[0][0], data.bptt, len(data.valid_dl)
Out[22]:
(<fastai.text.data.Text at 0x7f9331b57860>, 40, 33)
In [23]:
it = iter(data.valid_dl)
x1,y1 = next(it)
it.close()
In [24]:
x1[0]
Out[24]:
tensor([   0,   14,  628, 1310,  756, 1294,  629, 1691, 1282,  478,  766, 1460,
        1179,    0,   24,  143, 1472,   14,  628, 1310,  756, 1294,  629, 1691,
        1282,  478,  766, 1460, 1179,    0,   25,  634,  947, 1704, 1016,  454,
         569,  859,  341,  650], device='cuda:0')
In [25]:
y1[0]
Out[25]:
tensor([  14,  628, 1310,  756, 1294,  629, 1691, 1282,  478,  766, 1460, 1179,
           0,   24,  143, 1472,   14,  628, 1310,  756, 1294,  629, 1691, 1282,
         478,  766, 1460, 1179,    0,   25,  634,  947, 1704, 1016,  454,  569,
         859,  341,  650,  946], device='cuda:0')
In [26]:
v = data.valid_ds.vocab
v.textify(data.train_ds[0][0].data)
Out[26]:
'xxbos 13 627 1309 755 1293 628 1690 1281 477 765 1459 1178'
In [27]:
random_seed(seed_value=seed, use_cuda=use_cuda)
In [28]:
learn = Learner(data, Model_Density(nv=nv, nh=nh, bs=bs), metrics=accuracy)
In [28]:
learn.lr_find()
learn.recorder.plot()
LR Finder is complete, type {learner_name}.recorder.plot() to see the graph.
In [29]:
learn.fit_one_cycle(200, 1e-2);
epoch train_loss valid_loss accuracy time
1 6.963598 6.794902 0.061997 00:01
2 6.464911 5.819698 0.119366 00:00
3 5.873365 5.014758 0.175243 00:00
4 5.203681 4.212790 0.216862 00:01
5 4.534028 3.603711 0.324402 00:01
6 3.919159 3.043718 0.433694 00:01
7 3.376612 2.603957 0.514631 00:01
8 2.908641 2.259512 0.558771 00:01
9 2.510895 1.953933 0.607931 00:01
10 2.177735 1.715029 0.640229 00:01
11 1.902415 1.521286 0.669354 00:01
12 1.671714 1.366033 0.697195 00:01
13 1.498700 1.275100 0.700142 00:01
14 1.348517 1.146204 0.720307 00:01
15 1.220911 1.040214 0.751497 00:01
16 1.111490 0.959446 0.771893 00:01
17 1.021913 0.939835 0.756729 00:01
18 0.951478 0.828187 0.796117 00:01
19 0.881467 0.781293 0.798183 00:01
20 0.821287 0.736811 0.813494 00:01
21 0.776340 0.700635 0.825302 00:01
22 0.727407 0.666202 0.827752 00:01
23 0.689344 0.642456 0.836015 00:01
24 0.657872 0.621062 0.843572 00:01
25 0.626212 0.546793 0.867519 00:01
26 0.630828 0.694575 0.820739 00:01
27 0.615356 0.552568 0.864844 00:01
28 0.568692 0.501551 0.880125 00:01
29 0.540034 0.509745 0.874473 00:01
30 0.527858 0.512939 0.874526 00:01
31 0.515870 0.484364 0.884020 00:01
32 0.507249 0.541152 0.867288 00:01
33 0.638346 0.613729 0.848680 00:01
34 0.580912 0.475078 0.887630 00:01
35 0.527978 0.454268 0.892714 00:01
36 0.495153 0.442627 0.891738 00:01
37 0.487150 0.545637 0.869650 00:01
38 0.488512 0.449870 0.892205 00:01
39 0.480227 0.464473 0.889341 00:01
40 0.471190 0.448856 0.891767 00:01
41 0.491968 0.475216 0.884843 00:01
42 0.470660 0.450120 0.894087 00:01
43 0.480326 0.446868 0.890927 00:01
44 0.469917 0.483589 0.882842 00:00
45 0.608710 0.709529 0.833167 00:01
46 0.567673 0.460912 0.889897 00:01
47 0.514584 0.430129 0.897491 00:01
48 0.478464 0.414917 0.902172 00:01
49 0.459217 0.436341 0.894567 00:01
50 0.463525 0.451432 0.891992 00:01
51 0.468374 0.460046 0.889293 00:00
52 0.448112 0.419183 0.898976 00:00
53 0.642230 0.570725 0.862512 00:00
54 0.562047 0.430542 0.898763 00:00
55 0.498270 0.417436 0.900787 00:00
56 0.471800 0.413294 0.901711 00:00
57 0.458352 0.418333 0.900503 00:00
58 0.457185 0.466493 0.884683 00:01
59 0.452458 0.448444 0.889962 00:00
60 0.520453 0.444536 0.893608 00:01
61 0.476454 0.404115 0.903823 00:01
62 0.456225 0.478976 0.883055 00:01
63 0.449664 0.412289 0.900118 00:00
64 0.430974 0.401453 0.903764 00:01
65 0.419014 0.447311 0.892205 00:01
66 0.472171 0.499653 0.874101 00:01
67 0.447258 0.388832 0.907937 00:01
68 0.430973 0.452804 0.889110 00:01
69 0.467316 0.407934 0.903468 00:00
70 0.434231 0.381484 0.909440 00:00
71 0.412135 0.410772 0.903303 00:00
72 0.408759 0.488867 0.887571 00:00
73 0.490724 0.463815 0.886328 00:00
74 0.455707 0.383984 0.909055 00:01
75 0.427714 0.384917 0.909339 00:01
76 0.415478 0.428324 0.900012 00:01
77 0.436428 0.435506 0.893608 00:01
78 0.417452 0.372621 0.910701 00:01
79 0.401064 0.368297 0.913429 00:01
80 0.400733 0.376786 0.909624 00:01
81 0.395619 0.367422 0.913630 00:01
82 0.394352 0.387138 0.907067 00:01
83 0.394511 0.355445 0.916365 00:00
84 0.385621 0.378354 0.909452 00:01
85 0.385132 0.390068 0.903835 00:01
86 0.396378 0.408272 0.899651 00:01
87 0.400367 0.381332 0.909594 00:01
88 0.383582 0.342852 0.921135 00:01
89 0.364858 0.335672 0.923597 00:00
90 0.353290 0.344529 0.919087 00:01
91 0.383638 0.400630 0.903184 00:01
92 0.377436 0.344832 0.919910 00:01
93 0.372544 0.349291 0.918543 00:01
94 0.358517 0.340466 0.920828 00:01
95 0.368841 0.397364 0.906682 00:00
96 0.374042 0.365011 0.912630 00:01
97 0.366347 0.332969 0.924041 00:01
98 0.355495 0.333865 0.923544 00:00
99 0.348586 0.339383 0.921011 00:00
100 0.347919 0.341842 0.919490 00:00
101 0.347382 0.335846 0.922413 00:01
102 0.362783 0.361967 0.914003 00:01
103 0.356652 0.331301 0.923467 00:00
104 0.341300 0.316214 0.928959 00:00
105 0.336268 0.323315 0.926497 00:00
106 0.331520 0.319777 0.927770 00:00
107 0.337450 0.363321 0.914424 00:00
108 0.350334 0.339564 0.921437 00:00
109 0.341845 0.322457 0.926971 00:01
110 0.336658 0.320611 0.926598 00:01
111 0.327631 0.306033 0.931925 00:01
112 0.324505 0.342075 0.924467 00:01
113 0.327436 0.318198 0.928350 00:01
114 0.322787 0.311445 0.930048 00:01
115 0.344156 0.358735 0.914796 00:01
116 0.336370 0.307283 0.932268 00:01
117 0.323506 0.304596 0.932191 00:01
118 0.317701 0.300929 0.934612 00:01
119 0.313280 0.297949 0.935216 00:01
120 0.310268 0.307427 0.931463 00:01
121 0.309458 0.310683 0.930268 00:01
122 0.384177 0.398645 0.906499 00:01
123 0.363158 0.310172 0.931860 00:01
124 0.336900 0.305574 0.934061 00:01
125 0.323046 0.296804 0.936843 00:01
126 0.311328 0.291779 0.938299 00:01
127 0.302914 0.292930 0.937192 00:01
128 0.298936 0.290398 0.938246 00:01
129 0.298015 0.294183 0.936358 00:01
130 0.301172 0.297195 0.934641 00:01
131 0.300455 0.293688 0.936269 00:01
132 0.301193 0.294941 0.935754 00:01
133 0.298949 0.290869 0.937003 00:01
134 0.298088 0.292243 0.936967 00:01
135 0.299533 0.302599 0.933055 00:01
136 0.303822 0.296039 0.935085 00:01
137 0.303027 0.292551 0.936376 00:01
138 0.297278 0.284076 0.939903 00:01
139 0.293760 0.286506 0.938920 00:01
140 0.291565 0.282434 0.940388 00:01
141 0.290705 0.287238 0.938447 00:01
142 0.289410 0.282313 0.939832 00:01
143 0.289113 0.283686 0.940205 00:01
144 0.287541 0.281837 0.940868 00:01
145 0.288593 0.285768 0.938808 00:01
146 0.287050 0.281169 0.941028 00:01
147 0.285506 0.281760 0.940128 00:01
148 0.285783 0.283433 0.939406 00:01
149 0.285429 0.278858 0.941992 00:01
150 0.283137 0.278100 0.942229 00:01
151 0.281893 0.278660 0.941560 00:01
152 0.282925 0.281565 0.940743 00:01
153 0.284012 0.279491 0.941211 00:01
154 0.282136 0.277805 0.941714 00:01
155 0.279395 0.273553 0.943945 00:01
156 0.277198 0.272652 0.944336 00:01
157 0.276214 0.275451 0.942821 00:01
158 0.276345 0.273574 0.943235 00:01
159 0.275484 0.272749 0.944602 00:01
160 0.275380 0.274123 0.943353 00:01
161 0.274728 0.270259 0.945230 00:01
162 0.273720 0.272011 0.944460 00:01
163 0.272516 0.270527 0.945419 00:01
164 0.271829 0.269925 0.945301 00:01
165 0.271095 0.270437 0.945508 00:01
166 0.270988 0.269876 0.945005 00:01
167 0.270572 0.268732 0.945999 00:01
168 0.270086 0.268094 0.946182 00:01
169 0.269361 0.267193 0.946751 00:01
170 0.269130 0.267513 0.946265 00:01
171 0.268558 0.266665 0.946869 00:01
172 0.268545 0.267306 0.946615 00:01
173 0.267921 0.267159 0.946313 00:01
174 0.267430 0.267400 0.947112 00:01
175 0.267353 0.267791 0.946774 00:01
176 0.266944 0.266150 0.947337 00:01
177 0.266300 0.265913 0.947461 00:01
178 0.266342 0.266259 0.946934 00:01
179 0.266139 0.264837 0.947721 00:01
180 0.264860 0.264283 0.948053 00:01
181 0.265056 0.264293 0.948053 00:01
182 0.264797 0.265251 0.947751 00:01
183 0.264892 0.265048 0.947656 00:01
184 0.264637 0.264979 0.947846 00:01
185 0.263912 0.263642 0.948473 00:01
186 0.264208 0.263990 0.948201 00:01
187 0.263611 0.263521 0.948396 00:01
188 0.263421 0.263204 0.948579 00:01
189 0.262918 0.262930 0.948491 00:01
190 0.263049 0.263701 0.948343 00:01
191 0.263108 0.263562 0.948343 00:01
192 0.263592 0.262764 0.948384 00:01
193 0.262928 0.263324 0.948698 00:01
194 0.262404 0.262950 0.948846 00:01
195 0.262710 0.262878 0.948887 00:01
196 0.262429 0.263130 0.948822 00:01
197 0.262432 0.262794 0.948929 00:01
198 0.262091 0.263258 0.948816 00:01
199 0.262178 0.262949 0.948769 00:01
200 0.262225 0.263108 0.948680 00:01
In [30]:
# epoch 	train_loss 	valid_loss 	accuracy
#1      	6.533147    	6.611041 	0.070201
#100    	0.397764    	0.400868 	0.915774
learn.recorder.plot_losses()
In [31]:
#epoch 	train_loss 	valid_loss 	accuracy
#200 	0.390284 	0.403612 	0.915976
learn.recorder.plot_metrics()
In [32]:
preds = learn.get_preds()
preds
Out[32]:
[tensor([[[2.8479e-04, 1.8869e-04, 4.3575e-04,  ..., 8.4788e-04,
           2.6956e-04, 3.5801e-04],
          [1.7085e-17, 1.1150e-14, 1.8210e-11,  ..., 4.0638e-44,
           2.2000e-28, 8.1448e-26],
          [7.8851e-16, 2.7053e-11, 1.4436e-16,  ..., 1.2025e-37,
           6.8574e-24, 1.1375e-25],
          ...,
          [1.8901e-12, 3.6819e-16, 4.7385e-19,  ..., 3.4443e-20,
           1.6986e-16, 2.8513e-11],
          [9.4661e-09, 8.5445e-14, 1.1175e-16,  ..., 6.2395e-15,
           9.2918e-11, 1.5421e-10],
          [4.7298e-11, 4.8956e-14, 6.2887e-12,  ..., 4.3914e-18,
           1.7530e-16, 6.7330e-12]],
 
         [[3.6172e-04, 3.3068e-17, 8.0418e-15,  ..., 3.2444e-24,
           2.7267e-15, 5.0313e-16],
          [2.1805e-02, 6.3079e-14, 2.4923e-11,  ..., 8.6841e-13,
           8.2163e-15, 2.8146e-11],
          [3.7471e-02, 5.0946e-14, 3.8290e-08,  ..., 1.5797e-22,
           8.0887e-15, 2.6675e-18],
          ...,
          [4.9356e-17, 1.3540e-15, 2.3281e-14,  ..., 2.3009e-23,
           1.6231e-20, 8.8961e-20],
          [1.0800e-12, 9.8252e-16, 2.8415e-11,  ..., 2.9500e-24,
           2.4244e-15, 5.2620e-20],
          [3.0381e-13, 8.9189e-14, 5.5556e-14,  ..., 7.7678e-23,
           8.5923e-18, 3.8642e-20]],
 
         [[2.8113e-05, 1.1214e-13, 1.9222e-08,  ..., 1.3067e-22,
           3.3412e-16, 4.1342e-19],
          [9.9345e-07, 6.4545e-14, 4.5189e-14,  ..., 7.3972e-24,
           1.4223e-16, 3.8546e-16],
          [4.9144e-06, 1.7640e-16, 3.6300e-13,  ..., 1.8892e-24,
           1.9568e-15, 4.5422e-20],
          ...,
          [7.8641e-07, 2.4779e-18, 1.6241e-13,  ..., 8.4795e-23,
           6.1372e-18, 2.2027e-16],
          [1.9368e-01, 3.4251e-13, 2.4147e-14,  ..., 1.3825e-17,
           1.9598e-13, 1.0961e-17],
          [2.8557e-04, 2.0118e-04, 4.2282e-04,  ..., 1.0479e-03,
           3.7418e-04, 4.2860e-04]],
 
         ...,
 
         [[3.1594e-10, 2.5360e-15, 8.2831e-16,  ..., 8.5042e-19,
           1.0322e-16, 2.8472e-12],
          [2.0106e-09, 1.8444e-14, 1.6972e-12,  ..., 8.3508e-17,
           1.7839e-15, 5.3716e-12],
          [5.2737e-09, 1.3271e-11, 1.8439e-15,  ..., 5.1572e-17,
           7.8568e-18, 5.4577e-12],
          ...,
          [2.8509e-19, 4.7372e-21, 2.6799e-26,  ..., 6.0342e-26,
           3.5648e-23, 2.3521e-20],
          [5.1526e-15, 1.9939e-17, 5.6215e-22,  ..., 1.9845e-20,
           1.9624e-23, 8.7569e-20],
          [1.4218e-15, 4.1651e-18, 1.7305e-19,  ..., 7.8990e-20,
           1.4646e-22, 2.5532e-23]],
 
         [[3.7014e-08, 1.0103e-11, 3.1732e-12,  ..., 6.2884e-13,
           4.0975e-10, 1.8280e-13],
          [6.7632e-09, 2.0421e-13, 1.0572e-11,  ..., 4.7765e-15,
           9.8374e-13, 4.8582e-14],
          [6.3062e-10, 2.5786e-12, 2.3445e-12,  ..., 8.5418e-14,
           1.6164e-13, 1.5980e-13],
          ...,
          [3.8690e-08, 6.1642e-14, 1.1585e-15,  ..., 2.1993e-17,
           4.4787e-17, 9.5281e-19],
          [1.3778e-07, 1.0438e-14, 4.0264e-16,  ..., 6.6913e-18,
           5.2772e-17, 6.9405e-18],
          [1.0039e-06, 8.0889e-16, 7.2880e-16,  ..., 2.0680e-17,
           3.7356e-15, 3.4826e-18]],
 
         [[3.9247e-12, 2.1604e-14, 6.1315e-12,  ..., 1.5716e-19,
           1.0281e-17, 2.3410e-13],
          [1.6816e-11, 5.5593e-16, 7.4606e-15,  ..., 1.6211e-16,
           1.5126e-17, 4.5233e-13],
          [4.6010e-10, 1.7874e-15, 3.8081e-11,  ..., 2.1024e-16,
           1.2333e-11, 8.1597e-11],
          ...,
          [3.2753e-01, 3.3656e-10, 9.3145e-12,  ..., 5.6315e-13,
           1.1290e-10, 4.9703e-16],
          [2.4188e-04, 2.0839e-04, 3.9753e-04,  ..., 9.8902e-04,
           3.8965e-04, 4.4383e-04],
          [9.4469e-18, 4.5557e-14, 1.6877e-11,  ..., 3.1389e-43,
           2.9588e-28, 1.2444e-25]]]),
 tensor([[  14,  628, 1310,  ...,  341,  650,  946],
         [1326, 1417, 1460,  ...,  260, 1498, 1701],
         [ 135,  766, 1460,  ..., 1179,    0,  900],
         ...,
         [1090,  608,  117,  ...,  452, 1556, 1537],
         [ 256,  560, 1255,  ...,  353, 1499,  598],
         [ 946, 1088,  635,  ...,    0,   14,  628]])]

Load a RNN model already trained

Call the function from the jupyter notebook current folder:

  • learn.load(datapath4file(URLs.LOCAL_PATH/'models'/'name.gru.model.you.want.to.load'))

You need to run the class Model_Density(nn.Module) cells ABOVE first!

Check the outputs of the learned model:
In [30]:
m = learn.model
#set batch size to 1
learn.model.bs=1
#turn off dropout
m.eval()
#reset hidden state
m.reset() 
# Feed one full training path (BOS-prefixed token ids) through the model
# and read off the argmax next-token prediction at every position.
idxs = learn.data.train_ds[150][0].data
idxs = idxs[None,:]  # add batch dimension -> (1, seq_len)
print(idxs, idxs.shape)
p = m.forward(torch.tensor(idxs, device=device))
cls = F.softmax(p[0], dim=1).argmax(dim=1)  # predicted next token per step
cls
[[   0  869 1592  372  143 1472   14  628 1310  756 1294  629 1691 1282  478  766 1460 1179]] (1, 18)
Out[30]:
tensor([ 210, 1592,  372,  143, 1472,   14,  628, 1310,  756, 1294,  629, 1691,
        1282,  478,  766, 1460, 1179,  775], device='cuda:0')
In [31]:
m = learn.model
#set batch size to 1
learn.model.bs=1
#turn off dropout
m.eval()
#reset hidden state
m.reset() 

# Same check with a hand-typed prefix of token ids (starting with BOS=0).
idxs = np.array([0, 869, 1592, 372, 143, 1472])
idxs = idxs[None,:]  # add batch dimension -> (1, 6)
p = m.forward(torch.tensor(idxs, device=device))
cls = F.softmax(p[0], dim=1).argmax(dim=1)  # predicted next token per step
cls
Out[31]:
tensor([ 210, 1592,  372,  143, 1472,   14], device='cuda:0')
In [32]:
m.i_h(torch.tensor(idxs, device=device))
Out[32]:
tensor([[[10.0000, 10.0000],
         [ 1.2430,  1.6694],
         [ 1.2245,  1.6757],
         [ 1.2272,  1.6428],
         [ 1.2363,  1.6086],
         [ 1.2392,  1.5499]]], device='cuda:0')
In [33]:
m = learn.model
#set batch size to 1
learn.model.bs=1
#turn off dropout
m.eval()
#reset hidden state
m.reset() 

# Feed the raw embedding vectors (the previous cell's i_h output) directly,
# bypassing the embedding lookup; predictions should match Out[31] above.
p = m.forward_with_embedded(torch.tensor([[[10.0000, 10.0000],
         [ 1.2430,  1.6694],
         [ 1.2245,  1.6757],
         [ 1.2272,  1.6428],
         [ 1.2363,  1.6086],
         [ 1.2392,  1.5499]]], device=device))
cls = F.softmax(p[0], dim=1).argmax(dim=1)
cls
Out[33]:
tensor([ 210, 1592,  372,  143, 1472,   14], device='cuda:0')
In [34]:
sorted([ridx+1 for ridx in hqshift._idx_root])
Out[34]:
[494, 786, 969, 1179, 1480, 1616]

And test the RNN output with some rounded inputs:

In [36]:
from sklearn.neighbors import NearestNeighbors

import time

# Density contours of the test data using the EMST-estimated bandwidth.
fig, ax = plt.subplots(figsize=fig_size)
t0 = time.time()
sns.kdeplot(test_data[:,0],test_data[:,1],
            gridsize=256, n_levels=64, bw=hqshift._emst.bandwidth, 
            ax=ax, shade_lowest=False, linestyles='--', linewidths=0.5)
print(time.time()-t0)

# Snap each hand-picked query point to its nearest data sample.
nbrs = NearestNeighbors(n_neighbors=1, algorithm='ball_tree').fit(test_data)
query_points = [[-0.200, 0.2000], [-0.300, -0.100], [1.0000, 1.2000], [1.3000, 1.4000],
                [1.0000, 1.8000], [-2.000, 1.0000], [-1.200, 1.8000], [-1.200, -1.000],
                [1.2000, -1.000], [1.2000, -0.800], [1.0000, -0.500], [-1.200, -0.500],
                [-2.000, 1.4000], [-1.000, 0.4000], [-2.000, 2.0000], [-0.800, -1.100],
                [1.6000, -0.500], [1.5500, -1.000], [-0.800, -0.300], [-1.250, 0.0000],
                [0.0000, -1.300], [0.3000, -1.300], [0.0000, -0.500], [0.7000, -0.800],
                [0.6000, -0.400], [0.6000, 1.5000], [-1.300, 1.6000], [1.0000, 1.7500],
                [0.3500, -1.000], [-0.800, -0.500], [1.2000, 1.9000], [1.2000, -0.700],
                [0.5000, -0.100], [0.0000, 0.5000], [1.2000, -0.700], [0.5000, 1.0000],
                [0.8000, -1.400], [1.5000, -1.400], [0.5000, 1.5000], [1.0000, 0.0000]]

plt.setp(plt.gca().get_xticklabels(), visible=False)
plt.setp(plt.gca().get_yticklabels(), visible=False)
plt.tight_layout()

distances, indices = nbrs.kneighbors(query_points)
#_idx_root := [494, 786, 969, 1179, 1480, 1616]
# Mark every mode (cluster root) with a black X.
for idx in hqshift._idx_root:
    plt.plot((test_data[idx][0]),
             (test_data[idx][1]), 'X', color='k', markersize=16)

# Model setup is loop-invariant, so do it once (the original repeated it
# for every query point); only the hidden-state reset is per-query.
m = learn.model
learn.model.bs = 1   # single-sequence batches
m.eval()             # turn off dropout
root_tokens = [ridx+1 for ridx in hqshift._idx_root]  # mode token ids

for q_p in indices:
    m.reset()  # fresh hidden state for each rollout

    # Start from BOS (0) followed by the query's nearest-sample token id.
    idxs = np.array([0, q_p[0]+1])
    idxs = torch.tensor(idxs, device=device)
    for _ in np.arange(bptt):
        p = m.forward(idxs[None,:])
        cls = F.softmax(p[0], dim=1).argmax(dim=1)
        if cls[-1].item()==0: print(0); break  # predicted BOS: dead end
        idxs = torch.cat((idxs, cls[-1].unsqueeze(0)))
        if cls[-1].item() in root_tokens: break  # reached a mode
    # Plot the start point and the predicted hill-climbing path (ids are
    # shifted back by -1 to index into test_data).
    plt.plot(*test_data[idxs[1:]-1][0], 'o', color='purple', markersize=10)
    plt.plot(*test_data[idxs[1:]-1].T, color='purple', linewidth=2)

curaxes = plt.gca()
curaxes.set_xlim(ax_xlim)
curaxes.set_ylim(ax_ylim) 
fig.savefig('rnn_out.png', dpi=200, bbox_inches='tight')
9.184154033660889

Sklearn Benchmarks

Synthetic Dataset Comparison: HQuickShift settings are min cluster size = 30, min mode size = 50, min sample = 1, and the bandwidth is estimated with the EMST of HQuickShift. Quickshift++ settings are fixed at k = 20, β = 0.7 as suggested by the authors. For DBSCAN we use the settings given in the sklearn clustering benchmark. The bandwidth for QuickShift is estimated with the estimate_bandwidth routine (parameter quantile = .005) of the sklearn.cluster module and tuned further to obtain a reasonable number of clusters for all datasets.

In [26]:
%reload_ext autoreload
%autoreload 2
%matplotlib inline

import warnings

import numpy as np
import matplotlib.pyplot as plt

from sklearn import cluster, datasets, mixture
from sklearn.neighbors import kneighbors_graph
from sklearn.preprocessing import StandardScaler
from itertools import cycle, islice
from QuickshiftPP import *
import hdbscan

np.random.seed(0)

# ============
# Generate datasets. We choose the size big enough to see the scalability
# of the algorithms, but not too big to avoid too long running times
# ============
n_samples = 1500
noisy_circles = datasets.make_circles(n_samples=n_samples, factor=.5,
                                      noise=.05)
noisy_moons = datasets.make_moons(n_samples=n_samples, noise=.05)
blobs = datasets.make_blobs(n_samples=n_samples, random_state=8)
no_structure = np.random.rand(n_samples, 2), None

# Anisotropicly distributed data
random_state = 170
X, y = datasets.make_blobs(n_samples=n_samples, random_state=random_state)
transformation = [[0.6, -0.6], [-0.4, 0.8]]
X_aniso = np.dot(X, transformation)
aniso = (X_aniso, y)

# blobs with varied variances
varied = datasets.make_blobs(n_samples=n_samples,
                             cluster_std=[1.0, 2.5, 0.5],
                             random_state=random_state)

# ============
# Set up cluster parameters
# ============
plt.figure(figsize=fig_size)
plt.subplots_adjust(left=.02, right=.98, bottom=.001, top=.96, wspace=.05,
                    hspace=.01)

plot_num = 1

default_base = {'quantile': .005,
                'eps': .3,
                'damping': .9,
                'preference': -200,
                'n_neighbors': 10,
                'n_clusters': 3,
                'min_samples': 20,
                'xi': 0.05,
                'min_cluster_size': 0.1}
datasets = [
    (noisy_circles, {'damping': .77, 'preference': -240,
                     'quantile': .2, 'n_clusters': 2,
                     'min_samples': 20, 'xi': 0.25}),
    (noisy_moons, {'damping': .75, 'preference': -220, 'n_clusters': 2}),
    (varied, {'eps': .18, 'n_neighbors': 2,
              'min_samples': 5, 'xi': 0.035, 'min_cluster_size': .2}),
    (aniso, {'eps': .15, 'n_neighbors': 2,
             'min_samples': 20, 'xi': 0.1, 'min_cluster_size': .2}),
    (blobs, {}),
    (no_structure, {})]

for i_dataset, (dataset, algo_params) in enumerate(datasets):
    # update parameters with dataset-specific values
    params = default_base.copy()
    params.update(algo_params)

    X, y = dataset

    # normalize dataset for easier parameter selection
    X = StandardScaler().fit_transform(X)

    # estimate bandwidth for mean shift
    bandwidth = cluster.estimate_bandwidth(X, quantile=params['quantile'])

    # connectivity matrix for structured Ward
    connectivity = kneighbors_graph(
        X, n_neighbors=params['n_neighbors'], include_self=False)
    # make connectivity symmetric
    connectivity = 0.5 * (connectivity + connectivity.T)

    # ============
    # Create cluster objects
    # ============
    hqshift = HQSHIFT(min_cluster_size=30, min_mode_size=50, min_samples=1, allow_single_cluster=True)
    quickshiftpp = QuickshiftPP(k=20, beta=.7)
    dbscan = cluster.DBSCAN(eps=params['eps'])
    quick_s_norm = QuickShift(window_type="normal", bandwidth=bandwidth*0.85)

    clustering_algorithms = (
        ('HQuickShift', hqshift),
        ('Quickshift++', quickshiftpp),
        ('DBSCAN', dbscan),
        ('QuickShift', quick_s_norm),)

    for name, algorithm in clustering_algorithms:

        # catch warnings related to kneighbors_graph
        with warnings.catch_warnings():
            warnings.filterwarnings(
                "ignore",
                message="the number of connected components of the " +
                "connectivity matrix is [0-9]{1,2}" +
                " > 1. Completing it to avoid stopping the tree early.",
                category=UserWarning)
            warnings.filterwarnings(
                "ignore",
                message="Graph is not fully connected, spectral embedding" +
                " may not work as expected.",
                category=UserWarning)
            algorithm.fit(X)

        if hasattr(algorithm, 'labels_'):
            y_pred = algorithm.labels_.astype(np.int)
        elif hasattr(algorithm, 'memberships'):
            y_pred = algorithm.memberships
        else:
            y_pred = algorithm.predict(X)

        plt.subplot(len(datasets), len(clustering_algorithms), plot_num)
        if i_dataset == 0:
            plt.title(name, size=28)

        colors = np.array(list(islice(cycle(['#377eb8', '#ff7f00', '#4daf4a',
                                             '#f781bf', '#a65628', '#984ea3',
                                             '#999999', '#e41a1c', '#dede00']),
                                      int(max(y_pred) + 1))))
        # add black color for outliers (if any)
        colors = np.append(colors, ["#000000"])
        plt.scatter(X[:, 0], X[:, 1], s=10, color=colors[y_pred])

        plt.xlim(-2.5, 2.5)
        plt.ylim(-2.5, 2.5)
        plt.xticks(())
        plt.yticks(())
        plot_num += 1

plt.savefig('benchmark_sklearn.png', dpi=200, bbox_inches='tight')
plt.show()

Real Dataset Benchmarks

In [30]:
#%reload_ext memory_profiler
from sklearn.cluster import KMeans
from sklearn.metrics import adjusted_rand_score, adjusted_mutual_info_score
from sklearn.decomposition import PCA
from sklearn.preprocessing import scale
import math
import umap
import time
import warnings
warnings.simplefilter(action='ignore', category=FutureWarning)
seed = 0
seed_lst = [0, 23, 42, 1234, 43210, 1133557799, 22446688, 123456789, 987654321, 86420]
random_seed(seed_value=seed, use_cuda=use_cuda)

Collect and Plot Batch Run Results

The *.pkl files containing the results of the experiments are produced with the parameter_select.py script located under the data_sets\XX_bunch folders. First run those scripts to produce the results; then you can collect and plot the results here:

In [28]:
import pickle

# Grid layout of the result figure: one row per dataset, one column per
# method, plus two extra columns for the additional hdbscan/qshiftpp
# parameter panels.
datasets = ['banknote', 'glass', 'iris', 'mnist', 'seeds', 'phoneme', 'page-blocks']
methods  = ['kmeans', 'qshift', 'hdbscan', 'qshiftpp']
rowidx   = {ds:i for i,ds in enumerate(datasets)}
colidx   = {mt:i for i,mt in enumerate(methods)}

fig1, axes1 = plt.subplots(nrows=len(datasets), ncols=len(methods)+2, sharey="row", figsize=fig_size)
plt.subplots_adjust(left=.02, right=.98, bottom=.001, top=.96, wspace=.05, hspace=.01)
for filename in glob.iglob('data_sets/**/*.pkl', recursive=True):
    # Result files are named "<dataset>_<method>[_<suffix>].pkl"; files with
    # an extra suffix hold the repeated-run experiment and are skipped here.
    fname = os.path.splitext(os.path.basename(filename))[0]
    fname_parts = fname.split('_')
    dataset = fname_parts[0]
    method = fname_parts[1]
    ri, ci = rowidx[dataset], colidx[method]
    if len(fname_parts) == 2:
        if ri == 0:
            axes1[ri, ci].set_title(method, size=24)
        if ci == 0:
            axes1[ri, ci].set_ylabel(dataset, size=24)
        with open(filename, 'rb') as input_file:
            datadict = pickle.load(input_file)
            if method == 'kmeans': 
                # AMI / ARI as a function of k.
                axes1[ri, ci].plot(datadict['k'], datadict['ami'], 'b.-', label='AMI')
                axes1[ri, ci].plot(datadict['k'], datadict['ari'], 'r.-', label='ARI')
                axes1[ri, ci].grid()
                if ri == 0 and ci == 0: axes1[ri, ci].legend(loc='upper left')

            elif method == 'qshift':
                # AMI / ARI as a function of the bandwidth.
                axes1[ri, ci].plot(datadict['bw'], datadict['ami'], 'b.-',
                                   datadict['bw'], datadict['ari'], 'r.-')
                axes1[ri, ci].grid()

            elif method == 'hdbscan':
                # Panel 1: sweep min_cluster_size at the AMI-best min_sample.
                # (The original also computed an unused best-by-ARI index.)
                if ri == 0: axes1[ri, ci].set_title('hqshift[min_clust]', size=20)
                best_idx_ami = np.argmax(datadict['ami'])
                best_min_sample_size = datadict['k'][best_idx_ami]
                best_k_idx = np.where(np.array(datadict['k'])==best_min_sample_size)[0]
                axes1[ri, ci].plot(np.array(datadict['minclst'])[best_k_idx],
                                   np.array(datadict['ami'])[best_k_idx], 'b.-',
                                   np.array(datadict['minclst'])[best_k_idx],
                                   np.array(datadict['ari'])[best_k_idx], 'r.-')
                axes1[ri, ci].grid()
                print(dataset, method, best_min_sample_size)

                # Panel 2: sweep min_sample at the AMI-best min_cluster_size.
                if ri == 0: axes1[ri, ci+1].set_title('hqshift[min_sample]', size=20)
                best_minclst = datadict['minclst'][best_idx_ami]
                best_minclst_idx = np.where(np.array(datadict['minclst'])==best_minclst)[0]
                axes1[ri, ci+1].plot(np.array(datadict['k'])[best_minclst_idx],
                                   np.array(datadict['ami'])[best_minclst_idx], 'b.-',
                                   np.array(datadict['k'])[best_minclst_idx],
                                   np.array(datadict['ari'])[best_minclst_idx], 'r.-')
                axes1[ri, ci+1].grid()
                print(dataset, method, best_minclst)
            else:
                # qshiftpp -- Panel 1: sweep beta at the AMI-best k.
                if ri == 0: axes1[ri, ci+1].set_title('qshift++[beta]', size=24) 
                best_idx_ami = np.argmax(datadict['ami'])
                best_k_idx = np.where(np.array(datadict['k'])==datadict['k'][best_idx_ami])[0]
                axes1[ri, ci+1].plot(np.array(datadict['b'])[best_k_idx],
                                   np.array(datadict['ami'])[best_k_idx], 'b.-',
                                   np.array(datadict['b'])[best_k_idx],
                                   np.array(datadict['ari'])[best_k_idx], 'r.-')
                axes1[ri, ci+1].grid()
                print(dataset, method, datadict['k'][best_idx_ami])

                # Panel 2: sweep k at the AMI-best beta.
                if ri == 0: axes1[ri, ci+2].set_title('qshift++[k]', size=24) 
                best_b_idx = np.where(np.array(datadict['b'])==datadict['b'][best_idx_ami])[0]
                axes1[ri, ci+2].plot(np.array(datadict['k'])[best_b_idx],
                                   np.array(datadict['ami'])[best_b_idx], 'b.-',
                                   np.array(datadict['k'])[best_b_idx],
                                   np.array(datadict['ari'])[best_b_idx], 'r.-')
                axes1[ri, ci+2].grid()
                print(dataset, method, datadict['b'][best_idx_ami])
# Lay out and save once after all panels are drawn (the original re-ran
# tight_layout/savefig inside the loop; the final image is identical).
plt.tight_layout()
plt.savefig('experiment_1.png', dpi=200, bbox_inches='tight')
seeds qshiftpp 19
seeds qshiftpp 0.6
iris qshiftpp 18
iris qshiftpp 0.1
banknote qshiftpp 66
banknote qshiftpp 0.6
mnist hdbscan 6
mnist hdbscan 15
seeds hdbscan 43
seeds hdbscan 15
mnist qshiftpp 77
mnist qshiftpp 0.30000000000000004
banknote hdbscan 2
banknote hdbscan 114
glass qshiftpp 28
glass qshiftpp 0.4
iris hdbscan 4
iris hdbscan 15
glass hdbscan 2
glass hdbscan 11
page-blocks hdbscan 19
page-blocks hdbscan 15
page-blocks qshiftpp 260
page-blocks qshiftpp 0.6
phoneme hdbscan 71
phoneme hdbscan 15
phoneme qshiftpp 46
phoneme qshiftpp 0.8
In [64]:
# Stability over repeated runs: run index on the x-axis, AMI/ARI on the
# left (blue/red) axis, number of clusters found on the right (green) axis.
fig2, axes2 = plt.subplots(nrows=len(datasets), ncols=len(methods), sharey="row", figsize=fig_size)
plt.subplots_adjust(left=.02, right=.98, bottom=.001, top=.96, wspace=.05, hspace=.01)
for filename in glob.iglob('data_sets/**/*.pkl', recursive=True):
#     print(filename)
    fname = os.path.splitext(os.path.basename(filename))[0]
#     print(fname)
    fname_parts = fname.split('_')
    dataset = fname_parts[0]
    method = fname_parts[1]
    ri, ci = rowidx[dataset], colidx[method]
    # Only files with an extra name suffix hold the repeated-run results.
    if len(fname_parts) > 2:
        if ci == 0:
            axes2[ri, ci].set_ylabel(dataset, size=24)
        with open(filename, 'rb') as input_file:
            datadict = pickle.load(input_file)
            t = np.arange(1, 11)[1:]  # run indices 2..10 (first run dropped)
            axes2[ri, ci].plot(t, datadict['ami'][1:], 'b.-', label='AMI') 
            axes2[ri, ci].plot(t, datadict['ari'][1:], 'r.-', label='ARI')
            axes2[ri, ci].tick_params('y', colors='b')
            #axes2[ri, ci].set_xlabel('Run [index]')
            ax2 = axes2[ri, ci].twinx()  # secondary axis for cluster counts
            ax2.plot(t, datadict['n_clusters'][1:], 'g.-', label='N_CLUST')
            ax2.tick_params('y', colors='g')
            #ax2.set_ylabel('number of clusters', color='g')  
            if method == 'kmeans':
                if ri == 0: axes2[ri, ci].set_title(method, size=24)
                if ri == 0 and ci == 0:
                    axes2[ri, ci].legend(loc='upper left')
                    ax2.legend(loc='upper right')
            elif method == 'qshift':
                if ri == 0: axes2[ri, ci].set_title('qshift', size=24)
            elif method == 'hdbscan':
                if ri == 0: axes2[ri, ci].set_title('hqshift', size=24)     
            else:
                if ri == 0: axes2[ri, ci].set_title('qshift++', size=24) 
        
        axes2[ri, ci].grid()
        plt.tight_layout()
        plt.savefig('experiment_2.png', dpi=200, bbox_inches='tight')

Quick Test

In this section, we perform some tests with real datasets to demonstrate the algorithms' performance:

In [60]:
# Dataset selection: uncomment exactly one read_csv line below.
# All CSVs are headerless; the last column is the class label.
#data0 = pd.read_csv("data_sets/00_bunch/banknote/banknote.csv", header=None)
#data0 = pd.read_csv("data_sets/00_bunch/glass/glass.csv", header=None)
#data0 = pd.read_csv("data_sets/00_bunch/iris/iris.csv", header=None)
#data0 = pd.read_csv("data_sets/00_bunch/mnist/mnist.csv", header=None)
#data0 = pd.read_csv("data_sets/00_bunch/seeds/seeds.csv", header=None)
data0 = pd.read_csv("data_sets/01_bunch/phoneme/phoneme.csv", header=None)
#data0 = pd.read_csv("data_sets/02_bunch/page-blocks/page-blocks.csv", header=None)

# X: feature matrix (all columns but the last); y: class labels (last column)
X = data0.iloc[:,:-1].values
y = data0.iloc[:, -1].values
In [61]:
X.shape, y.shape
Out[61]:
((4508, 256), (4508,))
In [62]:
# Class inventory in a single np.unique pass instead of a manual loop.
labels, counts = np.unique(y, return_counts=True)
num_classes = labels.shape[0]
num_samples = y.shape[0]
# find minority class label; np.unique returns sorted labels and argmin keeps
# the first minimum, matching the old loop's strict-< tie-breaking
min_idx = counts.argmin()
min_label = labels[min_idx]
num_samples_minority = int(counts[min_idx])
print(min_label, num_classes, num_samples_minority, num_samples)
2 5 695 4508
In [63]:
# Reduce high-dimensional data to 4 components with UMAP before clustering;
# datasets with <= 4 features are used as-is.
if X.shape[1] > 4: 
    X = umap.UMAP(
        n_neighbors=50,     # fairly global neighbourhood structure
        min_dist=0.0,       # pack points tightly; helps density-based clustering
        n_components=4,
        random_state=seed,  # reproducible embedding
    ).fit_transform(X)
In [279]:
X.shape, y.shape
Out[279]:
((1372, 4), (1372,))

HDBSCAN

In [280]:
# Shared hyper-parameters for HDBSCAN and HQSHIFT below.
# NOTE(review): 17 and 125 look hand-tuned per dataset size — confirm.
best_min_cluster_size = 17
if X.shape[0] > 1000:
    best_min_cluster_size = 125  # larger datasets get a larger minimum cluster
best_min_sample_size = 3
best_min_mode_size = best_min_cluster_size
In [281]:
# Baseline: HDBSCAN with the tuned parameters; label -1 marks noise points.
clusterer = hdbscan.HDBSCAN(min_cluster_size=best_min_cluster_size, min_samples=best_min_sample_size)
y_hat = clusterer.fit_predict(X)
# ARI/AMI are symmetric, so the (y_hat, y) argument order does not matter
print("Adj. Rand Index Score: %f." % adjusted_rand_score(y_hat, y))
print("Adj. Mutual Info Score: %f." % adjusted_mutual_info_score(y_hat, y))
# assumes cluster labels are contiguous 0..k-1, so max label + 1 = cluster count
print('Classes: ', np.unique(y_hat), 'n_clusters: ', np.unique(y_hat)[-1]+1)
Adj. Rand Index Score: 0.792122.
Adj. Mutual Info Score: 0.625601.
Classes:  [-1  0  1] n_clusters:  2
In [282]:
# Same scores restricted to points HDBSCAN actually clustered (noise removed)
clustered = (y_hat >= 0)
(
    adjusted_rand_score(y[clustered], y_hat[clustered]),
    adjusted_mutual_info_score(y[clustered], y_hat[clustered])
)
Out[282]:
(0.9649402548773788, 0.9342344910570453)
In [283]:
y_hat[y_hat==-1].shape
Out[283]:
(127,)

Quickshift++

In [284]:
# Quickshift++ baseline; the library requires a C-contiguous array.
X_qspp = X.copy(order='C')
k=25  # number of neighbours for the density estimate
if X.shape[0] > 1000:
    k = 60  # more neighbours for larger datasets
model = QuickshiftPP(k=k, beta=.6)
# Compute the clustering.
model.fit(X_qspp)
y_hat = model.memberships
print("Adj. Rand Index Score: %f." % adjusted_rand_score(y_hat, y))
print("Adj. Mutual Info Score: %f." % adjusted_mutual_info_score(y_hat, y))
# assumes cluster labels are contiguous 0..k-1, so max label + 1 = cluster count
print('Classes: ', np.unique(y_hat), 'n_clusters: ', np.unique(y_hat)[-1]+1)
Adj. Rand Index Score: 0.690146.
Adj. Mutual Info Score: 0.555389.
Classes:  [0 1 2 3] n_clusters:  4

HQSHIFT

In [285]:
# HQuickSHIFT with the same hyper-parameters as HDBSCAN above; -1 = noise.
hqshift = HQSHIFT(min_cluster_size=best_min_cluster_size,
                  min_mode_size=best_min_mode_size,
                  min_samples=best_min_sample_size,
                  allow_single_cluster=False).fit(X)
# cluster labels
y_hat = hqshift.labels_
print("Adj. Rand Index Score: %f." % adjusted_rand_score(y_hat, y))
print("Adj. Mutual Info Score: %f." % adjusted_mutual_info_score(y_hat, y))
# assumes cluster labels are contiguous 0..k-1, so max label + 1 = cluster count
print('Classes: ', np.unique(y_hat), 'n_clusters: ', np.unique(y_hat)[-1]+1)
Adj. Rand Index Score: 0.798521.
Adj. Mutual Info Score: 0.632384.
Classes:  [-1  0  1] n_clusters:  2
In [286]:
clustered = (y_hat >= 0)
(
    adjusted_rand_score(y[clustered], y_hat[clustered]),
    adjusted_mutual_info_score(y[clustered], y_hat[clustered])
)
Out[286]:
(0.9650794905641422, 0.9344698677014239)
In [287]:
y_hat[y_hat==-1].shape
Out[287]:
(122,)
In [288]:
len(hqshift.paths), hqshift._idx_root
Out[288]:
(1372, [1334, 1002, 35, 180, 63, 1062])
In [289]:
# For each sample j, truncate its quick-shift path at the first occurrence of a
# cluster-root index and prepend j itself: one trajectory per (sample, root)
# pair whose path passes through that root.
clust_paths = [[j]+pth[:pth.index(idx)+1] for idx in hqshift._idx_root 
                                          for j, pth in enumerate(hqshift.paths) if idx in pth]
clust_data = [X[pth] for pth in clust_paths] 
# Construct Vocabulary, make sure BOS (<xxbos>) idx is 0
vocab_tok = [BOS] + [str(idx) for idx in np.arange(X.shape[0])]
# => [BOS, '0', '1', '2', ... 'n_samples-1']  (id 0 is the BOS token)
vocab_ = {idx:tok for idx,tok in enumerate(vocab_tok)}
# Shift sample indices by +1 (id 0 is reserved for BOS) and prepend the BOS id
ids = [[0]+list(np.array(pth)+1) for pth in clust_paths]
#ids[150], ids[-1], vocab_[ids[-1][-1]]
len(clust_paths)
Out[289]:
3058
In [290]:
nh=256   # hidden/embedding width of the GRU
bs=128   # batch size
bptt=20  # truncated back-propagation-through-time window
vocab=Vocab(vocab_)
nv = len(vocab.itos); nv  # vocabulary size = n_samples + 1 (BOS token)
Out[290]:
1373
In [291]:
class Model_Density(nn.Module):
    """2-layer GRU language model over trajectory-index tokens.

    Token ids -> embedding (nv x nh) -> 2-layer GRU -> BatchNorm -> linear
    projection back to vocabulary logits. The hidden state persists across
    forward calls (truncated BPTT) and is detached after every step.
    """
    def __init__(self, nv, nh, bs):
        super().__init__()
        self.nv = nv  # vocabulary size (n_samples + BOS)
        self.nh = nh  # hidden/embedding width
        self.bs = bs  # batch size used to shape the persistent hidden state
        self.i_h = nn.Embedding(self.nv,self.nh)
        self.rnn = nn.GRU(self.nh, self.nh, 2, batch_first=True)
        self.h_o = nn.Linear(self.nh, self.nv)
        self.bn = BatchNorm1dFlat(self.nh)
        self.reset()
    def reset(self):
        """Zero the persistent hidden state, shape (2 layers, bs, nh).

        Allocated on the same device as the model parameters rather than
        hard-coding ``.cuda()``, so the model also runs on CPU-only hosts
        (the notebook's device-selection logic otherwise has no effect).
        """
        device = next(self.parameters()).device
        self.h = torch.zeros(2, self.bs, self.nh, device=device)
    def forward(self, x):
        res, h = self.rnn(self.i_h(x), self.h)
        # detach so gradients do not flow across batch boundaries
        self.h = h.detach()
        return self.h_o(self.bn(res))
In [292]:
# Pack the trajectories into a fastai language-model DataBunch.
# NOTE(review): ids is a ragged list of lists, so np.array builds an object
# array (deprecated without dtype=object in newer numpy) — confirm.
ids = np.array(ids)
# train and valid use the same ids — there is no held-out split in this cell
data = TextLMDataBunch.from_ids('/tmp', vocab=vocab,
                                train_ids=ids, valid_ids=ids,
                                bs=bs, bptt=bptt, device=device)
data
Out[292]:
TextLMDataBunch;

Train: LabelList (3058 items)
x: LMTextList
xxbos 30 936 1058 1119 814 1029 1266 997 1334,xxbos 95 326 908 1030 1091 1152 847 786 969 915 834 1002 903 916 1215 849 922 1205 1266 997 1334,xxbos 326 908 1030 1091 1152 847 786 969 915 834 1002 903 916 1215 849 922 1205 1266 997 1334,xxbos 349 807 1226 1287 1165 860 799 1348 1104 921 1043 1113 930 846 968 785 1334,xxbos 764 825 1037 1139 773 1198 1137 1259 915 834 1002 903 916 1215 849 922 1205 1266 997 1334
y: LMLabelList
,,,,
Path: /tmp;

Valid: LabelList (3058 items)
x: LMTextList
xxbos 30 936 1058 1119 814 1029 1266 997 1334,xxbos 95 326 908 1030 1091 1152 847 786 969 915 834 1002 903 916 1215 849 922 1205 1266 997 1334,xxbos 326 908 1030 1091 1152 847 786 969 915 834 1002 903 916 1215 849 922 1205 1266 997 1334,xxbos 349 807 1226 1287 1165 860 799 1348 1104 921 1043 1113 930 846 968 785 1334,xxbos 764 825 1037 1139 773 1198 1137 1259 915 834 1002 903 916 1215 849 922 1205 1266 997 1334
y: LMLabelList
,,,,
Path: /tmp;

Test: None
In [293]:
# Re-seed so the model initialisation is reproducible, then build the learner
random_seed(seed_value=seed, use_cuda=use_cuda)
learn = Learner(data, Model_Density(nv, nh, bs), metrics=accuracy)
In [294]:
learn.lr_find()
learn.recorder.plot()
LR Finder is complete, type {learner_name}.recorder.plot() to see the graph.
In [295]:
learn.fit_one_cycle(50, 1e-2);
epoch train_loss valid_loss accuracy time
1 5.977040 6.072595 0.635326 00:00
2 4.842335 3.327955 0.767663 00:00
3 3.639819 1.595332 0.825628 00:00
4 2.564142 0.820289 0.877327 00:00
5 1.778812 0.529067 0.909358 00:00
6 1.267308 0.454194 0.918139 00:00
7 0.953756 0.460074 0.912670 00:00
8 0.762700 0.445945 0.916491 00:00
9 0.645771 0.451389 0.918716 00:00
10 0.572739 0.453099 0.918546 00:00
11 0.528556 0.458762 0.918614 00:00
12 0.500923 0.470596 0.918784 00:00
13 0.484658 0.464671 0.916474 00:00
14 0.473280 0.464757 0.918920 00:00
15 0.464973 0.453933 0.918920 00:00
16 0.457714 0.438242 0.918971 00:00
17 0.451527 0.434950 0.918240 00:00
18 0.446123 0.431196 0.918903 00:00
19 0.441447 0.428390 0.918801 00:00
20 0.436984 0.426108 0.918852 00:00
21 0.433461 0.423290 0.919395 00:00
22 0.430805 0.422857 0.918869 00:00
23 0.428794 0.421856 0.917714 00:00
24 0.427836 0.424104 0.919582 00:00
25 0.427204 0.421568 0.918869 00:00
26 0.426329 0.422266 0.918818 00:00
27 0.425386 0.422684 0.918903 00:00
28 0.425470 0.421338 0.918903 00:00
29 0.425313 0.421826 0.918835 00:00
30 0.424621 0.422219 0.918852 00:00
31 0.424393 0.421966 0.916593 00:00
32 0.424456 0.421035 0.918852 00:00
33 0.424005 0.420840 0.918767 00:00
34 0.423243 0.420765 0.918308 00:00
35 0.423248 0.420912 0.918869 00:00
36 0.423039 0.421797 0.918869 00:00
37 0.422846 0.420816 0.918852 00:00
38 0.422578 0.420531 0.918750 00:00
39 0.422143 0.420253 0.918920 00:00
40 0.422192 0.420137 0.918308 00:00
41 0.421710 0.420222 0.917188 00:00
42 0.421760 0.420095 0.918869 00:00
43 0.421348 0.420104 0.919158 00:00
44 0.421488 0.420145 0.918818 00:00
45 0.420881 0.420048 0.918920 00:00
46 0.420800 0.420038 0.918971 00:00
47 0.420852 0.420025 0.918835 00:00
48 0.420573 0.419994 0.918869 00:00
49 0.420338 0.419977 0.918597 00:00
50 0.420014 0.419995 0.918767 00:00
In [296]:
learn.recorder.plot_losses()
In [297]:
learn.recorder.plot_metrics()
In [298]:
# paths, number of samples
len(learn.data.train_ds), X.shape[0]
Out[298]:
(3058, 1372)
In [299]:
# Stop tokens for trajectory generation: the cluster-root indices mapped into
# vocabulary ids (+1 because id 0 is BOS), in ascending order.
stop_cond = sorted(root + 1 for root in hqshift._idx_root)
stop_cond
Out[299]:
[36, 64, 181, 1003, 1063, 1335]
In [300]:
# Classify every sample by letting the language model "walk" its trajectory:
# starting from (BOS, sample) we repeatedly append the most likely next token
# until a cluster-root token (stop_cond) or BOS is generated, then assign the
# HQuickSHIFT label of the last visited sample.
y_hat = -2*np.ones(X.shape[0])  # -2 marks "never assigned"

# loop-invariant setup, previously repeated on every iteration
m = learn.model
m.bs = 1   # predict one trajectory at a time
m.eval()   # turn off dropout
with torch.no_grad():  # inference only — no autograd graph needed
    for q_p in range(X.shape[0]):
        m.reset()  # fresh hidden state per trajectory
        # token ids are shifted by one because id 0 is the BOS token
        idxs = torch.tensor(np.array([0, q_p + 1]), device=device)
        for _ in range(bptt):
            logits = m(idxs[None, :])
            pred = F.softmax(logits[0], dim=1).argmax(dim=1)
            if pred[-1].item() == 0:
                # model predicted BOS: dead end, keep the last valid token
                print(0); break
            idxs = torch.cat((idxs, pred[-1].unsqueeze(0)))
            if pred[-1].item() in stop_cond:
                break  # reached a cluster-root token
        # label of the last index on the generated trajectory (undo BOS shift)
        y_hat[q_p] = hqshift.labels_[idxs[-1]-1] 
        print(q_p, y_hat[q_p])
#     import pdb; pdb.set_trace()
0 0.0
1 0.0
2 0.0
3 0.0
4 0.0
5 0.0
6 0.0
7 0.0
8 0.0
9 0.0
10 0.0
11 0.0
12 0.0
13 0.0
14 0.0
15 0.0
16 0.0
17 0.0
18 0.0
19 0.0
20 0.0
21 0.0
22 0.0
23 0.0
24 0.0
25 0.0
26 0.0
27 0.0
28 0.0
29 0.0
30 1.0
31 0.0
32 0.0
33 0.0
34 0.0
35 0.0
36 0.0
37 0.0
38 0.0
39 0.0
40 0.0
41 0.0
42 0.0
43 0.0
44 0.0
45 0.0
46 0.0
47 0.0
48 0.0
49 0.0
50 0.0
51 0.0
52 0.0
53 0.0
54 0.0
55 0.0
56 0.0
57 0.0
58 0.0
59 0.0
60 0.0
61 0.0
62 0.0
63 1.0
64 0.0
65 0.0
66 0.0
67 0.0
68 0.0
69 0.0
70 0.0
71 0.0
72 0.0
73 0.0
74 0.0
75 0.0
76 0.0
77 0.0
78 0.0
79 0.0
80 0.0
81 0.0
82 0.0
83 0.0
84 0.0
85 0.0
86 0.0
87 0.0
88 0.0
89 0.0
90 0.0
91 0.0
92 0.0
93 0.0
94 0.0
95 1.0
96 0.0
97 0.0
98 0.0
99 0.0
100 0.0
101 0.0
102 0.0
103 0.0
104 0.0
105 0.0
106 0.0
107 0.0
108 0.0
109 0.0
110 0.0
111 0.0
112 0.0
113 0.0
114 0.0
115 0.0
116 0.0
117 0.0
118 0.0
119 0.0
120 0.0
121 0.0
122 0.0
123 0.0
124 0.0
125 0.0
126 0.0
127 0.0
128 0.0
129 0.0
130 0.0
131 0.0
132 0.0
133 0.0
134 0.0
135 0.0
136 0.0
137 0.0
138 0.0
139 0.0
140 0.0
141 0.0
142 0.0
143 0.0
144 0.0
145 0.0
146 0.0
147 0.0
148 0.0
149 0.0
150 0.0
151 0.0
152 0.0
153 0.0
154 0.0
155 0.0
156 0.0
157 0.0
158 0.0
159 0.0
160 0.0
161 0.0
162 0.0
163 0.0
164 0.0
165 0.0
166 0.0
167 0.0
168 0.0
169 0.0
170 1.0
171 0.0
172 0.0
173 0.0
174 0.0
175 0.0
176 0.0
177 0.0
178 0.0
179 0.0
180 0.0
181 0.0
182 0.0
183 0.0
184 0.0
185 0.0
186 0.0
187 0.0
188 0.0
189 0.0
190 0.0
191 0.0
192 0.0
193 0.0
194 0.0
195 0.0
196 0.0
197 0.0
198 0.0
199 0.0
200 0.0
201 0.0
202 0.0
203 0.0
204 0.0
205 0.0
206 0.0
207 0.0
208 0.0
209 0.0
210 0.0
211 0.0
212 0.0
213 0.0
214 0.0
215 0.0
216 0.0
217 0.0
218 0.0
219 0.0
220 0.0
221 0.0
222 0.0
223 0.0
224 0.0
225 0.0
226 1.0
227 0.0
228 0.0
229 0.0
230 0.0
231 0.0
232 0.0
233 0.0
234 0.0
235 0.0
236 0.0
237 0.0
238 0.0
239 0.0
240 0.0
241 0.0
242 0.0
243 0.0
244 0.0
245 0.0
246 0.0
247 0.0
248 1.0
249 0.0
250 0.0
251 0.0
252 0.0
253 0.0
254 0.0
255 0.0
256 0.0
257 0.0
258 0.0
259 0.0
260 0.0
261 0.0
262 0.0
263 0.0
264 0.0
265 0.0
266 0.0
267 0.0
268 0.0
269 0.0
270 0.0
271 0.0
272 0.0
273 0.0
274 0.0
275 0.0
276 0.0
277 0.0
278 0.0
279 0.0
280 0.0
281 0.0
282 0.0
283 0.0
284 0.0
285 0.0
286 0.0
287 0.0
288 0.0
289 0.0
290 0.0
291 0.0
292 0.0
293 0.0
294 0.0
295 0.0
296 0.0
297 0.0
298 0.0
299 0.0
300 0.0
301 0.0
302 0.0
303 0.0
304 0.0
305 0.0
306 0.0
307 0.0
308 0.0
309 0.0
310 0.0
311 0.0
312 0.0
313 0.0
314 0.0
315 0.0
316 0.0
317 0.0
318 0.0
319 0.0
320 0.0
321 0.0
322 0.0
323 0.0
324 0.0
325 0.0
326 1.0
327 0.0
328 0.0
329 0.0
330 0.0
331 0.0
332 0.0
333 0.0
334 0.0
335 0.0
336 0.0
337 0.0
338 0.0
339 0.0
340 0.0
341 0.0
342 0.0
343 0.0
344 0.0
345 0.0
346 0.0
347 0.0
348 0.0
349 1.0
350 0.0
351 0.0
352 0.0
353 0.0
354 0.0
355 0.0
356 0.0
357 0.0
358 0.0
359 0.0
360 0.0
361 0.0
362 0.0
363 0.0
364 0.0
365 0.0
366 0.0
367 0.0
368 0.0
369 0.0
370 0.0
371 0.0
372 0.0
373 0.0
374 0.0
375 0.0
376 0.0
377 0.0
378 0.0
379 0.0
380 0.0
381 0.0
382 0.0
383 0.0
384 0.0
385 0.0
386 0.0
387 0.0
388 0.0
389 0.0
390 0.0
391 0.0
392 0.0
393 0.0
394 0.0
395 0.0
396 0.0
397 0.0
398 0.0
399 0.0
400 0.0
401 0.0
402 0.0
403 0.0
404 1.0
405 0.0
406 0.0
407 0.0
408 0.0
409 0.0
410 0.0
411 0.0
412 0.0
413 0.0
414 0.0
415 0.0
416 0.0
417 0.0
418 0.0
419 0.0
420 0.0
421 0.0
422 0.0
423 0.0
424 0.0
425 0.0
426 0.0
427 0.0
428 0.0
429 0.0
430 0.0
431 0.0
432 0.0
433 0.0
434 0.0
435 0.0
436 1.0
437 0.0
438 0.0
439 0.0
440 0.0
441 0.0
442 0.0
443 0.0
444 0.0
445 0.0
446 0.0
447 0.0
448 0.0
449 0.0
450 0.0
451 0.0
452 0.0
453 0.0
454 0.0
455 0.0
456 0.0
457 0.0
458 0.0
459 0.0
460 0.0
461 0.0
462 0.0
463 0.0
464 0.0
465 0.0
466 0.0
467 0.0
468 0.0
469 0.0
470 0.0
471 0.0
472 0.0
473 0.0
474 0.0
475 0.0
476 1.0
477 0.0
478 0.0
479 0.0
480 0.0
481 0.0
482 0.0
483 0.0
484 0.0
485 0.0
486 0.0
487 0.0
488 0.0
489 0.0
490 0.0
491 0.0
492 0.0
493 0.0
494 0.0
495 0.0
496 0.0
497 0.0
498 1.0
499 0.0
500 0.0
501 0.0
502 0.0
503 0.0
504 0.0
505 0.0
506 0.0
507 0.0
508 0.0
509 0.0
510 0.0
511 0.0
512 0.0
513 0.0
514 0.0
515 0.0
516 0.0
517 0.0
518 0.0
519 0.0
520 0.0
521 0.0
522 0.0
523 0.0
524 0.0
525 0.0
526 0.0
527 0.0
528 0.0
529 0.0
530 0.0
531 0.0
532 0.0
533 0.0
534 0.0
535 0.0
536 0.0
537 0.0
538 0.0
539 0.0
540 0.0
541 0.0
542 0.0
543 0.0
544 0.0
545 0.0
546 0.0
547 0.0
548 0.0
549 0.0
550 0.0
551 0.0
552 0.0
553 0.0
554 0.0
555 0.0
556 0.0
557 0.0
558 0.0
559 0.0
560 0.0
561 0.0
562 0.0
563 0.0
564 0.0
565 0.0
566 0.0
567 0.0
568 0.0
569 0.0
570 0.0
571 0.0
572 0.0
573 0.0
574 0.0
575 0.0
576 0.0
577 0.0
578 0.0
579 0.0
580 0.0
581 0.0
582 0.0
583 0.0
584 0.0
585 0.0
586 0.0
587 0.0
588 0.0
589 0.0
590 0.0
591 0.0
592 0.0
593 0.0
594 0.0
595 0.0
596 0.0
597 0.0
598 0.0
599 0.0
600 0.0
601 0.0
602 0.0
603 0.0
604 0.0
605 0.0
606 0.0
607 0.0
608 0.0
609 0.0
610 0.0
611 0.0
612 0.0
613 0.0
614 0.0
615 0.0
616 0.0
617 0.0
618 0.0
619 0.0
620 0.0
621 0.0
622 0.0
623 0.0
624 0.0
625 0.0
626 0.0
627 0.0
628 0.0
629 0.0
630 0.0
631 0.0
632 0.0
633 0.0
634 0.0
635 0.0
636 0.0
637 0.0
638 0.0
639 0.0
640 0.0
641 0.0
642 0.0
643 0.0
644 0.0
645 0.0
646 0.0
647 0.0
648 0.0
649 0.0
650 0.0
651 0.0
652 0.0
653 0.0
654 0.0
655 0.0
656 0.0
657 0.0
658 0.0
659 0.0
660 0.0
661 0.0
662 0.0
663 0.0
664 0.0
665 0.0
666 0.0
667 0.0
668 1.0
669 0.0
670 0.0
671 0.0
672 0.0
673 0.0
674 0.0
675 0.0
676 0.0
677 0.0
678 0.0
679 0.0
680 0.0
681 0.0
682 0.0
683 0.0
684 0.0
685 0.0
686 0.0
687 0.0
688 0.0
689 0.0
690 0.0
691 1.0
692 0.0
693 0.0
694 1.0
695 0.0
696 0.0
697 0.0
698 0.0
699 0.0
700 0.0
701 0.0
702 0.0
703 0.0
704 0.0
705 0.0
706 0.0
707 0.0
708 0.0
709 0.0
710 0.0
711 0.0
712 0.0
713 0.0
714 0.0
715 0.0
716 0.0
717 0.0
718 0.0
719 0.0
720 0.0
721 0.0
722 0.0
723 0.0
724 0.0
725 0.0
726 0.0
727 0.0
728 1.0
729 0.0
730 0.0
731 0.0
732 0.0
733 0.0
734 0.0
735 0.0
736 0.0
737 0.0
738 0.0
739 0.0
740 0.0
741 0.0
742 0.0
743 0.0
744 0.0
745 0.0
746 0.0
747 0.0
748 0.0
749 0.0
750 0.0
751 0.0
752 0.0
753 0.0
754 0.0
755 0.0
756 0.0
757 0.0
758 0.0
759 0.0
760 0.0
761 0.0
762 1.0
763 1.0
764 1.0
765 1.0
766 1.0
767 1.0
768 1.0
769 1.0
770 1.0
771 1.0
772 1.0
773 1.0
774 1.0
775 1.0
776 1.0
777 1.0
778 1.0
779 1.0
780 1.0
781 1.0
782 1.0
783 1.0
784 1.0
785 1.0
786 1.0
787 1.0
788 1.0
789 1.0
790 1.0
791 1.0
792 1.0
793 1.0
794 1.0
795 1.0
796 1.0
797 1.0
798 1.0
799 1.0
800 1.0
801 1.0
802 1.0
803 1.0
804 1.0
805 1.0
806 1.0
807 1.0
808 1.0
809 1.0
810 1.0
811 1.0
812 1.0
813 1.0
814 1.0
815 1.0
816 1.0
817 1.0
818 1.0
819 1.0
820 1.0
821 1.0
822 1.0
823 1.0
824 1.0
825 1.0
826 1.0
827 1.0
828 1.0
829 1.0
830 1.0
831 1.0
832 1.0
833 1.0
834 1.0
835 1.0
836 1.0
837 1.0
838 1.0
839 1.0
840 1.0
841 1.0
842 1.0
843 1.0
844 1.0
845 1.0
846 1.0
847 1.0
848 1.0
849 1.0
850 1.0
851 1.0
852 1.0
853 1.0
854 1.0
855 1.0
856 1.0
857 1.0
858 1.0
859 1.0
860 1.0
861 1.0
862 1.0
863 1.0
864 1.0
865 1.0
866 1.0
867 1.0
868 1.0
869 1.0
870 1.0
871 1.0
872 1.0
873 1.0
874 1.0
875 1.0
876 1.0
877 1.0
878 1.0
879 1.0
880 1.0
881 1.0
882 1.0
883 1.0
884 1.0
885 1.0
886 1.0
887 1.0
888 1.0
889 1.0
890 1.0
891 1.0
892 1.0
893 1.0
894 1.0
895 1.0
896 1.0
897 1.0
898 1.0
899 1.0
900 1.0
901 1.0
902 1.0
903 1.0
904 1.0
905 1.0
906 1.0
907 1.0
908 1.0
909 1.0
910 1.0
911 1.0
912 1.0
913 1.0
914 1.0
915 1.0
916 1.0
917 1.0
918 1.0
919 1.0
920 1.0
921 1.0
922 1.0
923 1.0
924 1.0
925 1.0
926 1.0
927 1.0
928 1.0
929 1.0
930 1.0
931 1.0
932 1.0
933 1.0
934 1.0
935 1.0
936 1.0
937 1.0
938 1.0
939 1.0
940 1.0
941 1.0
942 1.0
943 1.0
944 1.0
945 1.0
946 1.0
947 1.0
948 1.0
949 1.0
950 1.0
951 1.0
952 1.0
953 1.0
954 1.0
955 1.0
956 1.0
957 1.0
958 1.0
959 1.0
960 1.0
961 1.0
962 1.0
963 1.0
964 1.0
965 1.0
966 1.0
967 1.0
968 1.0
969 1.0
970 1.0
971 1.0
972 1.0
973 1.0
974 1.0
975 1.0
976 1.0
977 1.0
978 1.0
979 1.0
980 1.0
981 1.0
982 1.0
983 1.0
984 1.0
985 1.0
986 1.0
987 1.0
988 1.0
989 1.0
990 1.0
991 1.0
992 1.0
993 1.0
994 1.0
995 1.0
996 1.0
997 1.0
998 1.0
999 1.0
1000 1.0
1001 1.0
1002 1.0
1003 1.0
1004 1.0
1005 1.0
1006 1.0
1007 1.0
1008 1.0
1009 1.0
1010 1.0
1011 1.0
1012 1.0
1013 1.0
1014 1.0
1015 1.0
1016 1.0
1017 1.0
1018 1.0
1019 1.0
1020 1.0
1021 1.0
1022 1.0
1023 1.0
1024 1.0
1025 1.0
1026 1.0
1027 1.0
1028 1.0
1029 1.0
1030 1.0
1031 1.0
1032 1.0
1033 1.0
1034 1.0
1035 1.0
1036 1.0
1037 1.0
1038 1.0
1039 1.0
1040 1.0
1041 1.0
1042 1.0
1043 1.0
1044 1.0
1045 1.0
1046 1.0
1047 1.0
1048 1.0
1049 1.0
1050 1.0
1051 1.0
1052 1.0
1053 1.0
1054 1.0
1055 1.0
1056 1.0
1057 1.0
1058 1.0
1059 1.0
1060 1.0
1061 1.0
1062 0.0
1063 1.0
1064 1.0
1065 1.0
1066 1.0
1067 1.0
1068 1.0
1069 1.0
1070 1.0
1071 1.0
1072 1.0
1073 1.0
1074 1.0
1075 1.0
1076 1.0
1077 1.0
1078 1.0
1079 1.0
1080 1.0
1081 1.0
1082 1.0
1083 1.0
1084 1.0
1085 1.0
1086 1.0
1087 1.0
1088 1.0
1089 1.0
1090 1.0
1091 1.0
1092 1.0
1093 1.0
1094 1.0
1095 1.0
1096 1.0
1097 1.0
1098 1.0
1099 1.0
1100 1.0
1101 1.0
1102 1.0
1103 1.0
1104 1.0
1105 1.0
1106 1.0
1107 1.0
1108 1.0
1109 1.0
1110 1.0
1111 1.0
1112 1.0
1113 1.0
1114 1.0
1115 1.0
1116 1.0
1117 1.0
1118 1.0
1119 1.0
1120 1.0
1121 1.0
1122 1.0
1123 1.0
1124 1.0
1125 1.0
1126 1.0
1127 1.0
1128 1.0
1129 1.0
1130 1.0
1131 1.0
1132 1.0
1133 1.0
1134 1.0
1135 1.0
1136 1.0
1137 1.0
1138 1.0
1139 1.0
1140 1.0
1141 1.0
1142 1.0
1143 1.0
1144 1.0
1145 1.0
1146 1.0
1147 1.0
1148 1.0
1149 1.0
1150 1.0
1151 1.0
1152 1.0
1153 1.0
1154 1.0
1155 1.0
1156 1.0
1157 1.0
1158 1.0
1159 1.0
1160 1.0
1161 1.0
1162 1.0
1163 1.0
1164 1.0
1165 1.0
1166 1.0
1167 1.0
1168 1.0
1169 1.0
1170 1.0
1171 1.0
1172 1.0
1173 1.0
1174 1.0
1175 1.0
1176 1.0
1177 1.0
1178 1.0
1179 1.0
1180 1.0
1181 1.0
1182 1.0
1183 1.0
1184 1.0
1185 1.0
1186 1.0
1187 1.0
1188 1.0
1189 1.0
1190 1.0
1191 1.0
1192 1.0
1193 1.0
1194 1.0
1195 1.0
1196 1.0
1197 1.0
1198 1.0
1199 1.0
1200 1.0
1201 1.0
1202 1.0
1203 1.0
1204 1.0
1205 1.0
1206 1.0
1207 1.0
1208 1.0
1209 1.0
1210 1.0
1211 1.0
1212 1.0
1213 1.0
1214 1.0
1215 1.0
1216 1.0
1217 1.0
1218 1.0
1219 1.0
1220 1.0
1221 1.0
1222 1.0
1223 1.0
1224 1.0
1225 1.0
1226 1.0
1227 1.0
1228 1.0
1229 1.0
1230 1.0
1231 1.0
1232 1.0
1233 1.0
1234 1.0
1235 1.0
1236 1.0
1237 1.0
1238 1.0
1239 1.0
1240 1.0
1241 1.0
1242 1.0
1243 1.0
1244 1.0
1245 1.0
1246 1.0
1247 1.0
1248 1.0
1249 1.0
1250 1.0
1251 1.0
1252 1.0
1253 1.0
1254 1.0
1255 1.0
1256 1.0
1257 1.0
1258 1.0
1259 1.0
1260 1.0
1261 1.0
1262 1.0
1263 1.0
1264 1.0
1265 1.0
1266 1.0
1267 1.0
1268 1.0
1269 1.0
1270 1.0
1271 1.0
1272 1.0
1273 1.0
1274 1.0
1275 1.0
1276 1.0
1277 1.0
1278 1.0
1279 1.0
1280 1.0
1281 1.0
1282 1.0
1283 1.0
1284 1.0
1285 1.0
1286 1.0
1287 1.0
1288 1.0
1289 1.0
1290 1.0
1291 1.0
1292 1.0
1293 1.0
1294 1.0
1295 1.0
1296 1.0
1297 1.0
1298 1.0
1299 1.0
1300 1.0
1301 1.0
1302 1.0
1303 1.0
1304 1.0
1305 1.0
1306 1.0
1307 1.0
1308 1.0
1309 1.0
1310 1.0
1311 1.0
1312 1.0
1313 1.0
1314 1.0
1315 1.0
1316 1.0
1317 1.0
1318 1.0
1319 1.0
1320 1.0
1321 1.0
1322 1.0
1323 1.0
1324 1.0
1325 1.0
1326 1.0
1327 1.0
1328 1.0
1329 1.0
1330 1.0
1331 1.0
1332 1.0
1333 1.0
1334 1.0
1335 1.0
1336 1.0
1337 1.0
1338 1.0
1339 1.0
1340 1.0
1341 1.0
1342 1.0
1343 1.0
1344 1.0
1345 1.0
1346 1.0
1347 1.0
1348 1.0
1349 1.0
1350 1.0
1351 1.0
1352 1.0
1353 1.0
1354 1.0
1355 1.0
1356 1.0
1357 1.0
1358 1.0
1359 1.0
1360 1.0
1361 1.0
1362 1.0
1363 1.0
1364 1.0
1365 1.0
1366 1.0
1367 1.0
1368 1.0
1369 1.0
1370 1.0
1371 1.0
In [301]:
# Scores of the RNN-based label assignment against the ground truth
print("Adj. Rand Index Score: %f." % adjusted_rand_score(y_hat, y))
print("Adj. Mutual Info Score: %f." % adjusted_mutual_info_score(y_hat, y))
# assumes cluster labels are contiguous 0..k-1, so max label + 1 = cluster count
print('Classes: ', np.unique(y_hat), 'n_clusters: ', np.unique(y_hat)[-1]+1)
Adj. Rand Index Score: 0.951012.
Adj. Mutual Info Score: 0.909999.
Classes:  [0. 1.] n_clusters:  2.0

RNN Training Part

In this section we present a batched procedure that reproduces the experimental results for every dataset:

In [324]:
# Datasets for the batch experiment; order matches the `datasets` labels below.
fileList2Train = ["data_sets/00_bunch/banknote/banknote.csv",
                  "data_sets/00_bunch/glass/glass.csv",
                  "data_sets/00_bunch/iris/iris.csv",
                  "data_sets/00_bunch/mnist/mnist.csv",
                  "data_sets/00_bunch/seeds/seeds.csv",
                  "data_sets/01_bunch/phoneme/phoneme.csv",
                  "data_sets/02_bunch/page-blocks/page-blocks.csv"]
datasets = ['banknote', 'glass', 'iris', 'mnist', 'seeds', 'phoneme', 'page-blocks']

# NN Settings
nh     = 32    # hidden width (smaller than the single-dataset run above)
bs     = 64    # batch size
bptt   = 20    # truncated BPTT window
lr     = 1e-2  # learning rate
n_iter = 50    # number of epochs

# (ARI, AMI) tuples collected per method and dataset
perf_output = []

class Model_Density(nn.Module):
    """GRU language model over cluster-path token sequences.

    Args:
        nv: vocabulary size (number of distinct tokens, incl. BOS).
        nh: embedding / hidden size.
        bs: batch size used to shape the persistent hidden state.
    """
    def __init__(self, nv, nh, bs):
        super().__init__()
        self.nv = nv
        self.nh = nh
        self.bs = bs
        self.i_h = nn.Embedding(self.nv, self.nh)
        # 2-layer GRU; batch_first=True means input is (bs, seq_len, nh).
        self.rnn = nn.GRU(self.nh, self.nh, 2, batch_first=True)
        self.h_o = nn.Linear(self.nh, self.nv)
        self.bn = BatchNorm1dFlat(self.nh)
        self.reset()
    def reset(self):
        # Zero the persistent hidden state on the model's current device.
        # (The original hard-coded .cuda(), which crashes on CPU-only runs
        # even though the notebook explicitly falls back to a CPU device.)
        self.h = torch.zeros(2, self.bs, self.nh,
                             device=self.i_h.weight.device)
    def forward(self, x):
        # The Learner may move the module to another device after the
        # hidden state was created; keep the state on the input's device.
        if self.h.device != x.device:
            self.h = self.h.to(x.device)
        res, h = self.rnn(self.i_h(x), self.h)
        # Detach so gradients do not flow across batch boundaries (TBPTT).
        self.h = h.detach()
        return self.h_o(self.bn(res))

# For each data set: run three clustering baselines (HDBSCAN, Quickshift++,
# HQSHIFT), then train an RNN on the HQSHIFT mode-seeking paths with 10
# different seeds and use it to re-derive cluster labels for every point.
for ifn, fn2T in enumerate(fileList2Train):
    print('\n\nFile Name to Train: ', fn2T)
    print('#############################################################')
    data0 = pd.read_csv(fn2T, header=None)
    # Features are all columns but the last; the last column is the label.
    X = data0.iloc[:,:-1].values
    y = data0.iloc[:, -1].values
    # Reduce high-dimensional data to 4 UMAP components before clustering.
    if X.shape[1] > 4: 
        X = umap.UMAP(
            n_neighbors=50,
            min_dist=0.0,
            n_components=4,
            random_state=seed,
        ).fit_transform(X)
        
    # parameter definition: larger data sets get a larger min cluster size
    best_min_cluster_size = 17
    if X.shape[0] > 1000:
        best_min_cluster_size = 125
    best_min_sample_size = 3
    best_min_mode_size = best_min_cluster_size
    
    # HDBSCAN baseline
    print('HDBSCAN')
    print('#############################################################')
    clusterer = hdbscan.HDBSCAN(min_cluster_size=best_min_cluster_size, min_samples=best_min_sample_size)
    y_hat = clusterer.fit_predict(X)
    print("Adj. Rand Index Score: %f." % adjusted_rand_score(y_hat, y))
    print("Adj. Mutual Info Score: %f." % adjusted_mutual_info_score(y_hat, y))
    print('Classes: ', np.unique(y_hat), 'n_clusters: ', np.unique(y_hat)[-1]+1)
    perf_output.append((adjusted_rand_score(y_hat, y), adjusted_mutual_info_score(y_hat, y)))
    # Quickshift++ baseline (k scaled up for larger data sets)
    print('QSHIFTPP')
    print('#############################################################')
    # QuickshiftPP expects a C-contiguous array.
    X_qspp = X.copy(order='C')
    k=25
    if X.shape [0] > 1000:
        k = 60
    model = QuickshiftPP(k=k, beta=.6)
    # Compute the clustering.
    model.fit(X_qspp)
    y_hat = model.memberships
    print("Adj. Rand Index Score: %f." % adjusted_rand_score(y_hat, y))
    print("Adj. Mutual Info Score: %f." % adjusted_mutual_info_score(y_hat, y))
    print('Classes: ', np.unique(y_hat), 'n_clusters: ', np.unique(y_hat)[-1]+1)
    perf_output.append((adjusted_rand_score(y_hat, y), adjusted_mutual_info_score(y_hat, y)))
    
    #generate paths for RNN with HQSHIFT
    print('HQSHIFT')
    print('#############################################################')
    hqshift = HQSHIFT(min_cluster_size=best_min_cluster_size,
                      min_mode_size=best_min_mode_size,
                      min_samples=best_min_sample_size,
                      allow_single_cluster=False).fit(X)
    # cluster labels
    y_hat = hqshift.labels_
    print("Adj. Rand Index Score: %f." % adjusted_rand_score(y_hat, y))
    print("Adj. Mutual Info Score: %f." % adjusted_mutual_info_score(y_hat, y))
    print('Classes: ', np.unique(y_hat), 'n_clusters: ', np.unique(y_hat)[-1]+1)
    
    
    # One training sequence per point: its own index j followed by its
    # HQSHIFT path, truncated at the first root index the path reaches.
    clust_paths = [[j]+pth[:pth.index(idx)+1] for idx in hqshift._idx_root 
                                              for j, pth in enumerate(hqshift.paths) if idx in pth]
    clust_data = [X[pth] for pth in clust_paths] 
    # Construct Vocabulary, make sure BOS (<xxbos>) idx is 0
    vocab_tok = [BOS] + [str(idx) for idx in np.arange(X.shape[0])]
    # => [BOS, '0', '1', ..., str(n_points-1)]; point tokens shifted by +1
    vocab_ = {idx:tok for idx,tok in enumerate(vocab_tok)}
    # Encode paths: prepend BOS (0), shift point indices by +1.
    ids = [[0]+list(np.array(pth)+1) for pth in clust_paths]
    vocab=Vocab(vocab_)
    nv = len(vocab.itos)
    ids = np.array(ids)
    # Train and validate on the same sequences (language-model data bunch).
    data = TextLMDataBunch.from_ids('/tmp', vocab=vocab,
                                    train_ids=ids, valid_ids=ids,
                                    bs=bs, bptt=bptt, device=device)
    # Train the RNN 10 times with different seeds and evaluate each run.
    for inet in range(0, 10):
        #Learner
        random_seed(seed_value=seed_lst[inet], use_cuda=use_cuda)
        print('Seed: ', seed_lst[inet])
        learn = Learner(data, Model_Density(nv, nh, bs), metrics=accuracy)
        learn.fit_one_cycle(n_iter, lr);
        learn.recorder.plot_losses()
        plt.show()
        learn.recorder.plot_metrics()
        plt.xlabel('Batches Processed')
        plt.ylabel('Accuracy [%]')
        plt.show()
        # Persist one checkpoint per data set / seed combination.
        learn.save(datapath4file(str(URLs.LOCAL_PATH/'models')+'/'+datasets[ifn]+'%02d'%inet), return_path=True)
        print('Paths: ', len(learn.data.train_ds), ', Tokens: ', X.shape[0])
        # Root indices in vocabulary space (+1 for the BOS shift):
        # generation stops as soon as the model emits one of these.
        stop_cond = sorted([ridx+1 for ridx in hqshift._idx_root])
        print(stop_cond)
        # calculate cluster validation scores with trained RNN
        y_hat = -2*np.ones(X.shape[0])
        for q_p in range(X.shape[0]):
            m = learn.model
            #set batch size to 1
            learn.model.bs=1
            #turn off dropout
            m.eval()
            #reset hidden state
            m.reset()
            # Seed the rollout with [BOS, query-point token].
            idxs = np.array([0, q_p+1])
            idxs = torch.tensor(idxs, device=device)

            # Greedily extend the sequence for at most `bptt` steps.
            for _ in range(bptt):
                p = m.forward(idxs[None,:])
                cls = F.softmax(p[0], dim=1).argmax(dim=1)
                # Emitting BOS (token 0) means the rollout stalled; bail out.
                if cls[-1].item()==0: print(0); break
                idxs = torch.cat((idxs, cls[-1].unsqueeze(0)))
                if cls[-1].item() in stop_cond: break
            #set the class label from the last index of the trajectory produced by HQuickShift
            y_hat[q_p] = hqshift.labels_[idxs[-1]-1] 

        print("Adj. Rand Index Score: %f." % adjusted_rand_score(y_hat, y))
        print("Adj. Mutual Info Score: %f." % adjusted_mutual_info_score(y_hat, y))
        perf_output.append((adjusted_rand_score(y_hat, y), adjusted_mutual_info_score(y_hat, y)))
        print('Classes: ', np.unique(y_hat), 'n_clusters: ', np.unique(y_hat)[-1]+1)

File Name to Train:  data_sets/00_bunch/banknote/banknote.csv
#############################################################
HDBSCAN
#############################################################
Adj. Rand Index Score: 0.792122.
Adj. Mutual Info Score: 0.625601.
Classes:  [-1  0  1] n_clusters:  2
QSHIFTPP
#############################################################
Adj. Rand Index Score: 0.690146.
Adj. Mutual Info Score: 0.555389.
Classes:  [0 1 2 3] n_clusters:  4
HQSHIFT
#############################################################
Adj. Rand Index Score: 0.798521.
Adj. Mutual Info Score: 0.632384.
Classes:  [-1  0  1] n_clusters:  2
Seed:  0
epoch train_loss valid_loss accuracy time
1 7.044263 6.920993 0.034001 00:00
2 6.709130 6.297531 0.356641 00:00
3 6.107814 5.237638 0.546450 00:00
4 4.962787 3.358989 0.641525 00:00
5 3.415560 1.836544 0.730503 00:00
6 2.213147 1.153419 0.809918 00:00
7 1.438570 0.770817 0.873302 00:00
8 0.972705 0.585025 0.898947 00:00
9 0.715531 0.502509 0.910785 00:00
10 0.578538 0.474654 0.915591 00:00
11 0.508781 0.454988 0.918088 00:00
12 0.474802 0.448446 0.918699 00:00
13 0.456393 0.439007 0.918733 00:00
14 0.447378 0.439361 0.918716 00:00
15 0.444240 0.437068 0.918648 00:00
16 0.440355 0.436687 0.916457 00:00
17 0.439709 0.438742 0.918784 00:00
18 0.437227 0.432368 0.918733 00:00
19 0.437289 0.433192 0.918733 00:00
20 0.435506 0.438019 0.914368 00:00
21 0.434909 0.428678 0.916899 00:00
22 0.432849 0.431241 0.918716 00:00
23 0.433434 0.432313 0.918580 00:00
24 0.433142 0.427785 0.918716 00:00
25 0.433027 0.428560 0.918716 00:00
26 0.430004 0.428991 0.918835 00:00
27 0.431269 0.428593 0.918529 00:00
28 0.430494 0.429136 0.918716 00:00
29 0.429403 0.428048 0.918767 00:00
30 0.428515 0.427745 0.918767 00:00
31 0.427537 0.426975 0.918733 00:00
32 0.427245 0.426009 0.918733 00:00
33 0.427191 0.424321 0.918716 00:00
34 0.426557 0.424623 0.918733 00:00
35 0.425969 0.425220 0.918546 00:00
36 0.426189 0.423173 0.918869 00:00
37 0.425272 0.423195 0.918818 00:00
38 0.424256 0.422451 0.918767 00:00
39 0.423347 0.421978 0.918784 00:00
40 0.423587 0.422239 0.918818 00:00
41 0.423398 0.421616 0.918733 00:00
42 0.423496 0.421372 0.918801 00:00
43 0.423867 0.421241 0.918835 00:00
44 0.422994 0.421092 0.918852 00:00
45 0.422552 0.420766 0.918818 00:00
46 0.421451 0.420684 0.918818 00:00
47 0.420899 0.420724 0.918767 00:00
48 0.421617 0.420717 0.918852 00:00
49 0.421566 0.420641 0.918784 00:00
50 0.421076 0.420732 0.918750 00:00
/home/can/anaconda3/envs/hqshift/lib/python3.6/site-packages/fastai/datasets.py:153: YAMLLoadWarning: calling yaml.load() without Loader=... is deprecated, as the default Loader is unsafe. Please read https://msg.pyyaml.org/load for full details.
  with open(fpath, 'r') as yaml_file: return yaml.load(yaml_file)
Paths:  3058 , Tokens:  1372
[36, 64, 181, 1003, 1063, 1335]
Adj. Rand Index Score: 0.951012.
Adj. Mutual Info Score: 0.909999.
Classes:  [0. 1.] n_clusters:  2.0
Seed:  23
epoch train_loss valid_loss accuracy time
1 7.053731 6.923075 0.075798 00:00
2 6.705861 6.291873 0.308509 00:00
3 6.091847 5.203226 0.537415 00:00
4 4.940405 3.343619 0.632269 00:00
5 3.405858 1.835058 0.734885 00:00
6 2.198524 1.134763 0.816746 00:00
7 1.427225 0.770289 0.864419 00:00
8 0.969620 0.586738 0.900136 00:00
9 0.715642 0.499854 0.912245 00:00
10 0.579251 0.464015 0.916814 00:00
11 0.507217 0.455231 0.916372 00:00
12 0.473177 0.455472 0.918665 00:00
13 0.457231 0.446979 0.918750 00:00
14 0.449020 0.443722 0.918631 00:00
15 0.443532 0.438399 0.918750 00:00
16 0.440627 0.434290 0.918665 00:00
17 0.438735 0.438620 0.918682 00:00
18 0.437438 0.434133 0.918648 00:00
19 0.436779 0.434982 0.914759 00:00
20 0.435304 0.438316 0.918716 00:00
21 0.433747 0.438539 0.912908 00:00
22 0.433081 0.436179 0.918750 00:00
23 0.430437 0.432759 0.918801 00:00
24 0.429796 0.429544 0.918733 00:00
25 0.429453 0.430152 0.918852 00:00
26 0.428877 0.426628 0.918733 00:00
27 0.428120 0.427818 0.918665 00:00
28 0.428686 0.428132 0.918767 00:00
29 0.427650 0.425387 0.918784 00:00
30 0.427140 0.425413 0.918852 00:00
31 0.427017 0.427611 0.918767 00:00
32 0.425495 0.423974 0.918801 00:00
33 0.424709 0.423743 0.918784 00:00
34 0.425142 0.424183 0.918801 00:00
35 0.422795 0.423450 0.918818 00:00
36 0.423353 0.423185 0.918818 00:00
37 0.424462 0.422455 0.918801 00:00
38 0.423538 0.422485 0.918818 00:00
39 0.422874 0.421994 0.918818 00:00
40 0.423245 0.421959 0.918835 00:00
41 0.423398 0.421555 0.918852 00:00
42 0.422301 0.421106 0.918801 00:00
43 0.423346 0.420832 0.918818 00:00
44 0.422480 0.420718 0.918801 00:00
45 0.423267 0.420693 0.918835 00:00
46 0.422162 0.420550 0.918818 00:00
47 0.421325 0.420569 0.918835 00:00
48 0.421699 0.420551 0.918801 00:00
49 0.419644 0.420537 0.918801 00:00
50 0.420277 0.420509 0.918801 00:00
Paths:  3058 , Tokens:  1372
[36, 64, 181, 1003, 1063, 1335]
Adj. Rand Index Score: 0.951012.
Adj. Mutual Info Score: 0.909999.
Classes:  [0. 1.] n_clusters:  2.0
Seed:  42
epoch train_loss valid_loss accuracy time
1 6.987081 6.826691 0.104535 00:00
2 6.564578 6.066473 0.326613 00:00
3 5.871880 4.897140 0.512704 00:00
4 4.678733 3.105808 0.634222 00:00
5 3.199978 1.725034 0.743835 00:00
6 2.071503 1.091280 0.821246 00:00
7 1.365065 0.754912 0.873013 00:00
8 0.941880 0.592287 0.898115 00:00
9 0.702985 0.504306 0.909273 00:00
10 0.574382 0.468194 0.916661 00:00
11 0.505428 0.449421 0.918224 00:00
12 0.471008 0.453906 0.916508 00:00
13 0.457015 0.453297 0.915829 00:00
14 0.451132 0.451250 0.918682 00:00
15 0.444567 0.439409 0.918325 00:00
16 0.443640 0.438300 0.913689 00:00
17 0.441540 0.440561 0.918393 00:00
18 0.437770 0.436433 0.918699 00:00
19 0.436996 0.433346 0.918784 00:00
20 0.435860 0.430529 0.918784 00:00
21 0.435087 0.431886 0.918665 00:00
22 0.433687 0.430996 0.918648 00:00
23 0.433275 0.431775 0.918597 00:00
24 0.432019 0.434081 0.918614 00:00
25 0.431902 0.430050 0.918801 00:00
26 0.431993 0.427122 0.918784 00:00
27 0.431229 0.429217 0.918733 00:00
28 0.430194 0.425386 0.918784 00:00
29 0.430812 0.427755 0.918869 00:00
30 0.429447 0.425156 0.916593 00:00
31 0.427334 0.426781 0.918733 00:00
32 0.427351 0.426650 0.918852 00:00
33 0.427379 0.425408 0.918716 00:00
34 0.427864 0.426067 0.918767 00:00
35 0.425298 0.423778 0.918835 00:00
36 0.424343 0.424733 0.918750 00:00
37 0.425830 0.423472 0.918801 00:00
38 0.423263 0.422754 0.918733 00:00
39 0.423025 0.422460 0.918767 00:00
40 0.423453 0.422227 0.918716 00:00
41 0.423430 0.421678 0.918784 00:00
42 0.423687 0.421368 0.918835 00:00
43 0.422789 0.420994 0.918818 00:00
44 0.420430 0.421228 0.918818 00:00
45 0.421983 0.421247 0.918767 00:00
46 0.422362 0.420924 0.918852 00:00
47 0.421204 0.420894 0.918784 00:00
48 0.420436 0.420927 0.918801 00:00
49 0.422097 0.420892 0.918750 00:00
50 0.421451 0.420992 0.918767 00:00
Paths:  3058 , Tokens:  1372
[36, 64, 181, 1003, 1063, 1335]
Adj. Rand Index Score: 0.951012.
Adj. Mutual Info Score: 0.909999.
Classes:  [0. 1.] n_clusters:  2.0
Seed:  1234
epoch train_loss valid_loss accuracy time
1 7.041072 6.925368 0.062143 00:00
2 6.719156 6.328650 0.317086 00:00
3 6.139575 5.294110 0.494480 00:00
4 5.017441 3.409136 0.610955 00:00
5 3.482327 1.907480 0.719005 00:00
6 2.258461 1.171008 0.813638 00:00
7 1.461064 0.773753 0.867086 00:00
8 0.979859 0.592329 0.898251 00:00
9 0.719229 0.501040 0.907677 00:00
10 0.581269 0.479114 0.916117 00:00
11 0.510892 0.455712 0.917867 00:00
12 0.474027 0.450686 0.918461 00:00
13 0.457504 0.449055 0.918767 00:00
14 0.447438 0.439815 0.918682 00:00
15 0.442772 0.433869 0.918699 00:00
16 0.441110 0.436493 0.916559 00:00
17 0.439615 0.435613 0.918224 00:00
18 0.437315 0.436451 0.916474 00:00
19 0.435990 0.430930 0.918818 00:00
20 0.432648 0.435101 0.918767 00:00
21 0.431487 0.431179 0.916627 00:00
22 0.430845 0.429775 0.918699 00:00
23 0.431270 0.432654 0.918835 00:00
24 0.430959 0.429887 0.913689 00:00
25 0.430123 0.428822 0.917629 00:00
26 0.430601 0.426298 0.918784 00:00
27 0.429596 0.425177 0.918784 00:00
28 0.428499 0.427447 0.918784 00:00
29 0.428706 0.428138 0.918801 00:00
30 0.427106 0.423735 0.918784 00:00
31 0.427071 0.425217 0.918784 00:00
32 0.425971 0.424252 0.918767 00:00
33 0.425205 0.424537 0.918835 00:00
34 0.424279 0.423838 0.918784 00:00
35 0.424163 0.425170 0.918801 00:00
36 0.423792 0.422845 0.918767 00:00
37 0.422903 0.421615 0.918767 00:00
38 0.422389 0.422464 0.918818 00:00
39 0.422402 0.421632 0.918801 00:00
40 0.423566 0.421729 0.918869 00:00
41 0.423396 0.421433 0.918801 00:00
42 0.421717 0.421293 0.918784 00:00
43 0.421680 0.421047 0.918767 00:00
44 0.421123 0.420807 0.918750 00:00
45 0.420797 0.420695 0.918801 00:00
46 0.420417 0.420614 0.918801 00:00
47 0.420685 0.420612 0.918818 00:00
48 0.419477 0.420571 0.918801 00:00
49 0.419370 0.420672 0.918716 00:00
50 0.419966 0.420539 0.918784 00:00
Paths:  3058 , Tokens:  1372
[36, 64, 181, 1003, 1063, 1335]
Adj. Rand Index Score: 0.951012.
Adj. Mutual Info Score: 0.909999.
Classes:  [0. 1.] n_clusters:  2.0
Seed:  43210
epoch train_loss valid_loss accuracy time
1 7.000233 6.864887 0.087007 00:00
2 6.638663 6.205266 0.306301 00:00
3 6.007813 5.123574 0.482490 00:00
4 4.837506 3.245512 0.635564 00:00
5 3.319189 1.799670 0.746501 00:00
6 2.148674 1.116702 0.820007 00:00
7 1.398739 0.770765 0.869039 00:00
8 0.953240 0.581061 0.899660 00:00
9 0.705338 0.504975 0.911872 00:00
10 0.574363 0.471839 0.912755 00:00
11 0.507599 0.461713 0.918190 00:00
12 0.473118 0.449559 0.918461 00:00
13 0.458132 0.447005 0.918682 00:00
14 0.450020 0.441749 0.911872 00:00
15 0.445066 0.441291 0.915625 00:00
16 0.440497 0.435241 0.918648 00:00
17 0.440310 0.438298 0.918699 00:00
18 0.438940 0.438771 0.918682 00:00
19 0.437408 0.433363 0.918750 00:00
20 0.437045 0.432919 0.918699 00:00
21 0.435915 0.433297 0.918801 00:00
22 0.433438 0.432543 0.918716 00:00
23 0.431512 0.430205 0.918767 00:00
24 0.431592 0.432824 0.918665 00:00
25 0.432389 0.430447 0.918784 00:00
26 0.431402 0.429243 0.916593 00:00
27 0.430109 0.428114 0.918818 00:00
28 0.430301 0.429901 0.918852 00:00
29 0.429586 0.427824 0.918852 00:00
30 0.427872 0.427190 0.918682 00:00
31 0.427868 0.427096 0.918784 00:00
32 0.427223 0.426341 0.918852 00:00
33 0.426060 0.425791 0.918801 00:00
34 0.426422 0.425096 0.918767 00:00
35 0.425961 0.424575 0.918750 00:00
36 0.424634 0.423621 0.918835 00:00
37 0.424830 0.423781 0.918869 00:00
38 0.424511 0.422329 0.918767 00:00
39 0.423325 0.422689 0.918767 00:00
40 0.423530 0.422266 0.918852 00:00
41 0.423336 0.421982 0.918682 00:00
42 0.420949 0.421729 0.918784 00:00
43 0.423495 0.421139 0.918920 00:00
44 0.423161 0.421075 0.918801 00:00
45 0.421790 0.420817 0.918920 00:00
46 0.421506 0.420869 0.918818 00:00
47 0.420549 0.420756 0.918818 00:00
48 0.420940 0.420681 0.918767 00:00
49 0.421933 0.420645 0.918801 00:00
50 0.420552 0.420679 0.918801 00:00
Paths:  3058 , Tokens:  1372
[36, 64, 181, 1003, 1063, 1335]
Adj. Rand Index Score: 0.951012.
Adj. Mutual Info Score: 0.909999.
Classes:  [0. 1.] n_clusters:  2.0
Seed:  1133557799
epoch train_loss valid_loss accuracy time
1 7.049446 6.949334 0.055095 00:00
2 6.761670 6.400378 0.330961 00:00
3 6.218757 5.409724 0.545550 00:00
4 5.120669 3.535824 0.613706 00:00
5 3.556817 1.955474 0.713587 00:00
6 2.304996 1.193196 0.807320 00:00
7 1.487150 0.780008 0.868257 00:00
8 0.997112 0.584172 0.899440 00:00
9 0.724906 0.499759 0.908747 00:00
10 0.583030 0.465774 0.916593 00:00
11 0.508723 0.453594 0.918020 00:00
12 0.472751 0.446006 0.916321 00:00
13 0.458666 0.442097 0.913043 00:00
14 0.449044 0.440206 0.918597 00:00
15 0.441792 0.441118 0.918614 00:00
16 0.441676 0.439240 0.912432 00:00
17 0.438625 0.436639 0.918597 00:00
18 0.439759 0.435743 0.918716 00:00
19 0.434879 0.429838 0.918733 00:00
20 0.435320 0.431340 0.918733 00:00
21 0.434061 0.431900 0.918614 00:00
22 0.434051 0.432633 0.918648 00:00
23 0.433497 0.426929 0.918767 00:00
24 0.430877 0.429723 0.918784 00:00
25 0.430271 0.432859 0.918733 00:00
26 0.430525 0.430608 0.918767 00:00
27 0.428503 0.430302 0.918784 00:00
28 0.428112 0.426868 0.918784 00:00
29 0.428677 0.426774 0.918801 00:00
30 0.428151 0.426868 0.918852 00:00
31 0.427076 0.426058 0.918835 00:00
32 0.425716 0.425197 0.918818 00:00
33 0.425206 0.423866 0.918801 00:00
34 0.424298 0.423408 0.918784 00:00
35 0.424790 0.423344 0.918733 00:00
36 0.424036 0.423532 0.918801 00:00
37 0.423947 0.422492 0.918835 00:00
38 0.424023 0.422772 0.918903 00:00
39 0.423149 0.422385 0.918818 00:00
40 0.422681 0.421667 0.918835 00:00
41 0.421699 0.421557 0.918852 00:00
42 0.421929 0.421199 0.918869 00:00
43 0.421374 0.421039 0.918818 00:00
44 0.422078 0.420954 0.918784 00:00
45 0.421715 0.420715 0.918835 00:00
46 0.421536 0.420683 0.918818 00:00
47 0.421369 0.420682 0.918818 00:00
48 0.421097 0.420665 0.918852 00:00
49 0.420322 0.420654 0.918818 00:00
50 0.420508 0.420675 0.918801 00:00
Paths:  3058 , Tokens:  1372
[36, 64, 181, 1003, 1063, 1335]
Adj. Rand Index Score: 0.951012.
Adj. Mutual Info Score: 0.909999.
Classes:  [0. 1.] n_clusters:  2.0
Seed:  22446688
epoch train_loss valid_loss accuracy time
1 7.035093 6.891237 0.096128 00:00
2 6.659505 6.200901 0.341695 00:00
3 6.018675 5.088204 0.548234 00:00
4 4.832672 3.227494 0.632082 00:00
5 3.310291 1.792130 0.734884 00:00
6 2.147572 1.133278 0.817086 00:00
7 1.401762 0.757517 0.873217 00:00
8 0.957925 0.578125 0.899439 00:00
9 0.709361 0.506308 0.905010 00:00
10 0.576873 0.464529 0.916423 00:00
11 0.509092 0.455843 0.918003 00:00
12 0.474346 0.449380 0.918377 00:00
13 0.458797 0.446028 0.918580 00:00
14 0.449634 0.441383 0.918716 00:00
15 0.443025 0.444376 0.913808 00:00
16 0.440938 0.440231 0.918716 00:00
17 0.439740 0.437772 0.918716 00:00
18 0.439452 0.439375 0.918563 00:00
19 0.436726 0.440745 0.918699 00:00
20 0.435715 0.436867 0.918665 00:00
21 0.435113 0.431694 0.917561 00:00
22 0.434474 0.434232 0.918767 00:00
23 0.432918 0.431985 0.918733 00:00
24 0.432127 0.430401 0.918699 00:00
25 0.432430 0.429111 0.918699 00:00
26 0.431258 0.426426 0.918750 00:00
27 0.430045 0.429508 0.918835 00:00
28 0.429302 0.428440 0.918784 00:00
29 0.427248 0.425337 0.918784 00:00
30 0.426378 0.426546 0.918733 00:00
31 0.426356 0.427742 0.918733 00:00
32 0.426253 0.427252 0.918682 00:00
33 0.426043 0.424929 0.918750 00:00
34 0.426505 0.424642 0.918716 00:00
35 0.424912 0.425569 0.918784 00:00
36 0.425300 0.423565 0.918716 00:00
37 0.424603 0.423061 0.918784 00:00
38 0.423622 0.423866 0.918733 00:00
39 0.423569 0.422139 0.918818 00:00
40 0.421647 0.421659 0.918801 00:00
41 0.422032 0.421945 0.918767 00:00
42 0.423075 0.421543 0.918801 00:00
43 0.421785 0.421447 0.918733 00:00
44 0.422130 0.421138 0.918818 00:00
45 0.421442 0.421086 0.918767 00:00
46 0.421997 0.420918 0.918801 00:00
47 0.422327 0.420826 0.918818 00:00
48 0.421020 0.420877 0.918784 00:00
49 0.420310 0.420940 0.918750 00:00
50 0.421524 0.420804 0.918784 00:00
Paths:  3058 , Tokens:  1372
[36, 64, 181, 1003, 1063, 1335]
Adj. Rand Index Score: 0.951012.
Adj. Mutual Info Score: 0.909999.
Classes:  [0. 1.] n_clusters:  2.0
Seed:  123456789
epoch train_loss valid_loss accuracy time
1 7.017137 6.878190 0.087840 00:00
2 6.643477 6.197146 0.320312 00:00
3 6.006318 5.110162 0.521535 00:00
4 4.844345 3.209101 0.638264 00:00
5 3.316853 1.795212 0.736498 00:00
6 2.147209 1.115885 0.818818 00:00
7 1.399731 0.753382 0.870924 00:00
8 0.954093 0.587307 0.898251 00:00
9 0.707781 0.502579 0.911073 00:00
10 0.576721 0.466429 0.911141 00:00
11 0.508777 0.460322 0.914080 00:00
12 0.473769 0.442921 0.917629 00:00
13 0.457142 0.441750 0.918359 00:00
14 0.448581 0.436101 0.918597 00:00
15 0.444531 0.440070 0.918699 00:00
16 0.441617 0.440377 0.918716 00:00
17 0.439767 0.436971 0.918597 00:00
18 0.438270 0.435470 0.918716 00:00
19 0.437275 0.432478 0.918716 00:00
20 0.435754 0.436174 0.918546 00:00
21 0.434826 0.435958 0.916508 00:00
22 0.434420 0.429394 0.918648 00:00
23 0.433856 0.430365 0.916508 00:00
24 0.433128 0.429061 0.918750 00:00
25 0.432005 0.430671 0.918852 00:00
26 0.430558 0.428604 0.918733 00:00
27 0.429318 0.426835 0.918818 00:00
28 0.428890 0.425801 0.918784 00:00
29 0.429739 0.426460 0.918750 00:00
30 0.429386 0.427223 0.918784 00:00
31 0.429269 0.424235 0.918835 00:00
32 0.428167 0.425157 0.918767 00:00
33 0.427013 0.424845 0.918784 00:00
34 0.426773 0.424350 0.918716 00:00
35 0.426424 0.422890 0.918818 00:00
36 0.425107 0.424125 0.918835 00:00
37 0.424581 0.423103 0.918801 00:00
38 0.423724 0.421759 0.918801 00:00
39 0.423152 0.421920 0.918784 00:00
40 0.422518 0.422469 0.918784 00:00
41 0.422166 0.422614 0.918869 00:00
42 0.421871 0.421643 0.918818 00:00
43 0.422184 0.421409 0.918852 00:00
44 0.422510 0.421301 0.918852 00:00
45 0.421371 0.421397 0.918818 00:00
46 0.421157 0.421217 0.918784 00:00
47 0.420860 0.421129 0.918818 00:00
48 0.420694 0.421030 0.918835 00:00
49 0.420269 0.421029 0.918852 00:00
50 0.420817 0.421000 0.918801 00:00
Paths:  3058 , Tokens:  1372
[36, 64, 181, 1003, 1063, 1335]
Adj. Rand Index Score: 0.951012.
Adj. Mutual Info Score: 0.909999.
Classes:  [0. 1.] n_clusters:  2.0
Seed:  987654321
epoch train_loss valid_loss accuracy time
1 7.058703 6.920393 0.047894 00:00
2 6.707454 6.293720 0.310971 00:00
3 6.084228 5.188914 0.522656 00:00
4 4.905699 3.309880 0.629586 00:00
5 3.371505 1.825627 0.729008 00:00
6 2.178928 1.140837 0.816151 00:00
7 1.411689 0.771000 0.872775 00:00
8 0.960166 0.580654 0.900289 00:00
9 0.708822 0.501180 0.911583 00:00
10 0.576291 0.473657 0.914198 00:00
11 0.506285 0.460301 0.918071 00:00
12 0.471464 0.447850 0.918359 00:00
13 0.457254 0.444729 0.918597 00:00
14 0.447793 0.443605 0.916780 00:00
15 0.444058 0.441806 0.918563 00:00
16 0.441777 0.434802 0.918648 00:00
17 0.439698 0.439796 0.918088 00:00
18 0.437731 0.438012 0.918733 00:00
19 0.436346 0.433271 0.918784 00:00
20 0.437246 0.435797 0.918716 00:00
21 0.436527 0.440247 0.914351 00:00
22 0.433979 0.431665 0.918105 00:00
23 0.433636 0.431824 0.918750 00:00
24 0.433501 0.432018 0.918750 00:00
25 0.433385 0.430475 0.915948 00:00
26 0.432020 0.428339 0.918699 00:00
27 0.430848 0.431205 0.918733 00:00
28 0.430101 0.430657 0.914351 00:00
29 0.430049 0.428840 0.918682 00:00
30 0.428506 0.426172 0.918733 00:00
31 0.428360 0.428847 0.918852 00:00
32 0.427468 0.426117 0.916406 00:00
33 0.425787 0.425045 0.918733 00:00
34 0.426417 0.425037 0.918716 00:00
35 0.425530 0.424351 0.918784 00:00
36 0.427194 0.424430 0.918852 00:00
37 0.425410 0.424045 0.918767 00:00
38 0.425238 0.423300 0.918801 00:00
39 0.424707 0.423453 0.918869 00:00
40 0.424697 0.423016 0.918818 00:00
41 0.423454 0.421948 0.918784 00:00
42 0.423736 0.421850 0.918886 00:00
43 0.424083 0.421327 0.918852 00:00
44 0.422505 0.421106 0.918784 00:00
45 0.422093 0.420990 0.918750 00:00
46 0.422140 0.420956 0.918733 00:00
47 0.421751 0.420873 0.918801 00:00
48 0.421466 0.420797 0.918886 00:00
49 0.421029 0.420777 0.918869 00:00
50 0.420594 0.420875 0.918801 00:00
Paths:  3058 , Tokens:  1372
[36, 64, 181, 1003, 1063, 1335]
Adj. Rand Index Score: 0.951012.
Adj. Mutual Info Score: 0.909999.
Classes:  [0. 1.] n_clusters:  2.0
Seed:  86420
epoch train_loss valid_loss accuracy time
1 7.059550 6.928452 0.045499 00:00
2 6.713783 6.309832 0.374643 00:00
3 6.102345 5.213819 0.546179 00:00
4 4.947294 3.332577 0.620669 00:00
5 3.404826 1.835117 0.734137 00:00
6 2.201776 1.136351 0.815370 00:00
7 1.430607 0.767288 0.873540 00:00
8 0.973591 0.602503 0.894192 00:00
9 0.716220 0.511154 0.910785 00:00
10 0.582854 0.467845 0.916151 00:00
11 0.513305 0.462170 0.918122 00:00
12 0.476099 0.444310 0.918478 00:00
13 0.459376 0.449026 0.916236 00:00
14 0.449475 0.440607 0.912721 00:00
15 0.444278 0.436608 0.918648 00:00
16 0.443413 0.437325 0.913757 00:00
17 0.440427 0.435298 0.917850 00:00
18 0.438821 0.433817 0.917697 00:00
19 0.437047 0.437149 0.918767 00:00
20 0.436670 0.434867 0.916406 00:00
21 0.434998 0.436465 0.913757 00:00
22 0.435107 0.430531 0.918699 00:00
23 0.434583 0.432721 0.916644 00:00
24 0.433000 0.429150 0.918682 00:00
25 0.431419 0.432223 0.918801 00:00
26 0.430751 0.427112 0.918733 00:00
27 0.428854 0.428719 0.918733 00:00
28 0.430212 0.430996 0.918784 00:00
29 0.429969 0.427184 0.918122 00:00
30 0.429004 0.426241 0.918767 00:00
31 0.428244 0.426445 0.918818 00:00
32 0.427956 0.425739 0.918597 00:00
33 0.427744 0.426530 0.918784 00:00
34 0.426080 0.424101 0.918716 00:00
35 0.425002 0.424386 0.918716 00:00
36 0.424890 0.423140 0.918733 00:00
37 0.424275 0.423109 0.918716 00:00
38 0.423405 0.423524 0.918801 00:00
39 0.423160 0.422654 0.918767 00:00
40 0.421670 0.422295 0.918835 00:00
41 0.422828 0.421711 0.918784 00:00
42 0.423683 0.421788 0.918767 00:00
43 0.422920 0.421652 0.918886 00:00
44 0.423356 0.421434 0.918801 00:00
45 0.423586 0.421401 0.918750 00:00
46 0.422662 0.421256 0.918784 00:00
47 0.421630 0.421009 0.918818 00:00
48 0.421153 0.421265 0.918767 00:00
49 0.421498 0.421013 0.918852 00:00
50 0.421730 0.421227 0.918852 00:00
Paths:  3058 , Tokens:  1372
[36, 64, 181, 1003, 1063, 1335]
Adj. Rand Index Score: 0.951012.
Adj. Mutual Info Score: 0.909999.
Classes:  [0. 1.] n_clusters:  2.0


File Name to Train:  data_sets/00_bunch/glass/glass.csv
#############################################################
HDBSCAN
#############################################################
Adj. Rand Index Score: 0.283314.
Adj. Mutual Info Score: 0.402715.
Classes:  [-1  0  1  2  3  4  5  6] n_clusters:  7
QSHIFTPP
#############################################################
Adj. Rand Index Score: 0.333801.
Adj. Mutual Info Score: 0.441001.
Classes:  [0 1 2 3 4 5] n_clusters:  6
HQSHIFT
#############################################################
Adj. Rand Index Score: 0.286975.
Adj. Mutual Info Score: 0.402591.
Classes:  [-1  0  1  2  3  4  5  6] n_clusters:  7
Seed:  0
epoch train_loss valid_loss accuracy time
1 5.427144 5.368369 0.002344 00:00
2 5.399401 5.353737 0.003320 00:00
3 5.365912 5.327244 0.012500 00:00
4 5.319256 5.279968 0.060938 00:00
5 5.258268 5.198246 0.160547 00:00
6 5.182064 5.058128 0.250781 00:00
7 5.087173 4.818050 0.321484 00:00
8 4.966190 4.416709 0.413281 00:00
9 4.811664 3.824367 0.492969 00:00
10 4.618668 3.093404 0.544141 00:00
11 4.386407 2.393784 0.579297 00:00
12 4.122202 1.909624 0.603125 00:00
13 3.850425 1.613908 0.637109 00:00
14 3.584978 1.404215 0.678320 00:00
15 3.334800 1.209343 0.717383 00:00
16 3.104121 1.085332 0.752148 00:00
17 2.891346 0.980158 0.765039 00:00
18 2.697430 0.929224 0.781836 00:00
19 2.521285 0.883328 0.789648 00:00
20 2.362403 0.862884 0.792969 00:00
21 2.219600 0.844672 0.789648 00:00
22 2.091628 0.830644 0.799219 00:00
23 1.976380 0.830674 0.800195 00:00
24 1.872636 0.818609 0.799414 00:00
25 1.778639 0.818283 0.800195 00:00
26 1.693414 0.815225 0.801367 00:00
27 1.617684 0.811447 0.800000 00:00
28 1.547907 0.810262 0.800195 00:00
29 1.484912 0.812950 0.800195 00:00
30 1.428019 0.810805 0.800195 00:00
31 1.375671 0.808737 0.800977 00:00
32 1.327985 0.802834 0.801562 00:00
33 1.284142 0.805412 0.800586 00:00
34 1.244037 0.804609 0.801172 00:00
35 1.207547 0.802274 0.800781 00:00
36 1.174120 0.801325 0.801953 00:00
37 1.143800 0.798815 0.801367 00:00
38 1.115263 0.800357 0.800391 00:00
39 1.089226 0.798268 0.800781 00:00
40 1.065022 0.799015 0.800586 00:00
41 1.043468 0.798556 0.801172 00:00
42 1.023291 0.796274 0.800977 00:00
43 1.005593 0.797120 0.801172 00:00
44 0.988868 0.797076 0.801172 00:00
45 0.973747 0.796438 0.802539 00:00
46 0.960024 0.796847 0.801562 00:00
47 0.947407 0.796639 0.801758 00:00
48 0.935705 0.796548 0.801953 00:00
49 0.924352 0.796077 0.801172 00:00
50 0.914348 0.795353 0.802734 00:00
/home/can/anaconda3/envs/hqshift/lib/python3.6/site-packages/fastai/datasets.py:153: YAMLLoadWarning: calling yaml.load() without Loader=... is deprecated, as the default Loader is unsafe. Please read https://msg.pyyaml.org/load for full details.
  with open(fpath, 'r') as yaml_file: return yaml.load(yaml_file)
Paths:  567 , Tokens:  214
[30, 65, 91, 103, 131, 144, 161, 186, 205]
0
Adj. Rand Index Score: 0.294805.
Adj. Mutual Info Score: 0.394123.
Classes:  [0. 1. 2. 3. 4. 6.] n_clusters:  7.0
Seed:  23
epoch train_loss valid_loss accuracy time
1 5.398248 5.331454 0.001953 00:00
2 5.369689 5.319881 0.004687 00:00
3 5.329267 5.295765 0.022656 00:00
4 5.277300 5.249129 0.079297 00:00
5 5.207437 5.167469 0.219141 00:00
6 5.120356 5.021542 0.337891 00:00
7 5.014138 4.761950 0.394141 00:00
8 4.880582 4.342500 0.473242 00:00
9 4.715627 3.737195 0.536914 00:00
10 4.513608 3.031506 0.570508 00:00
11 4.275284 2.367526 0.591992 00:00
12 4.012300 1.875890 0.625977 00:00
13 3.749454 1.574549 0.656641 00:00
14 3.494818 1.369565 0.673828 00:00
15 3.252964 1.208162 0.712305 00:00
16 3.031253 1.085760 0.754102 00:00
17 2.826158 1.013570 0.748828 00:00
18 2.639806 0.956116 0.754102 00:00
19 2.469978 0.902447 0.792773 00:00
20 2.317794 0.889296 0.782812 00:00
21 2.180147 0.858123 0.777344 00:00
22 2.056628 0.841688 0.800391 00:00
23 1.945992 0.829220 0.799414 00:00
24 1.845740 0.818463 0.800977 00:00
25 1.754671 0.816477 0.797852 00:00
26 1.672415 0.814316 0.800586 00:00
27 1.598014 0.809497 0.802344 00:00
28 1.529709 0.811810 0.800391 00:00
29 1.468987 0.807440 0.800000 00:00
30 1.413436 0.810472 0.800586 00:00
31 1.362966 0.805451 0.800977 00:00
32 1.316888 0.806608 0.800000 00:00
33 1.274596 0.803365 0.801172 00:00
34 1.235708 0.802979 0.800195 00:00
35 1.200293 0.802813 0.800586 00:00
36 1.168010 0.800269 0.800977 00:00
37 1.138070 0.798714 0.801563 00:00
38 1.111602 0.799756 0.801758 00:00
39 1.086605 0.797808 0.800000 00:00
40 1.063183 0.797533 0.802344 00:00
41 1.042333 0.796278 0.802734 00:00
42 1.023434 0.796351 0.802148 00:00
43 1.005940 0.796566 0.801563 00:00
44 0.989363 0.795533 0.800977 00:00
45 0.974159 0.795558 0.801367 00:00
46 0.960199 0.796038 0.801953 00:00
47 0.946614 0.794688 0.802344 00:00
48 0.934538 0.795631 0.801367 00:00
49 0.923215 0.796665 0.800781 00:00
50 0.913385 0.796368 0.800586 00:00
Paths:  567 , Tokens:  214
[30, 65, 91, 103, 131, 144, 161, 186, 205]
0
Adj. Rand Index Score: 0.294805.
Adj. Mutual Info Score: 0.394123.
Classes:  [0. 1. 2. 3. 4. 6.] n_clusters:  7.0
Seed:  42
epoch train_loss valid_loss accuracy time
1 5.374361 5.378659 0.005078 00:00
2 5.339983 5.357896 0.007227 00:00
3 5.292035 5.320826 0.028906 00:00
4 5.230787 5.256152 0.079102 00:00
5 5.148985 5.145339 0.171875 00:00
6 5.047358 4.965795 0.281250 00:00
7 4.928289 4.674118 0.387891 00:00
8 4.781484 4.207337 0.509375 00:00
9 4.601059 3.499662 0.547852 00:00
10 4.384217 2.709545 0.581445 00:00
11 4.134220 2.107385 0.601367 00:00
12 3.868825 1.720106 0.626563 00:00
13 3.604651 1.472240 0.665430 00:00
14 3.351658 1.291419 0.713281 00:00
15 3.119538 1.142360 0.739258 00:00
16 2.903648 1.045710 0.745508 00:00
17 2.708824 0.978470 0.762305 00:00
18 2.531967 0.914025 0.787891 00:00
19 2.372274 0.876546 0.794531 00:00
20 2.227128 0.856778 0.796484 00:00
21 2.098412 0.852706 0.799414 00:00
22 1.981949 0.832158 0.796094 00:00
23 1.877553 0.821958 0.800586 00:00
24 1.783025 0.832075 0.801758 00:00
25 1.697544 0.819706 0.790234 00:00
26 1.620323 0.820846 0.800586 00:00
27 1.550885 0.817348 0.800000 00:00
28 1.487016 0.812146 0.801563 00:00
29 1.429000 0.807847 0.801172 00:00
30 1.376878 0.804745 0.801367 00:00
31 1.328821 0.810850 0.800781 00:00
32 1.285855 0.805254 0.800586 00:00
33 1.245599 0.803053 0.801172 00:00
34 1.209744 0.802491 0.801172 00:00
35 1.176989 0.800639 0.800195 00:00
36 1.146517 0.800478 0.801563 00:00
37 1.118560 0.799258 0.800586 00:00
38 1.093218 0.800826 0.800977 00:00
39 1.069428 0.799199 0.801953 00:00
40 1.047716 0.798319 0.800781 00:00
41 1.027881 0.795561 0.801367 00:00
42 1.009845 0.795856 0.800977 00:00
43 0.993027 0.795522 0.801758 00:00
44 0.977310 0.796876 0.801367 00:00
45 0.962652 0.796104 0.802148 00:00
46 0.949457 0.796622 0.801758 00:00
47 0.937269 0.795296 0.801172 00:00
48 0.926594 0.795738 0.801953 00:00
49 0.917096 0.795468 0.801172 00:00
50 0.907895 0.796096 0.801367 00:00
Paths:  567 , Tokens:  214
[30, 65, 91, 103, 131, 144, 161, 186, 205]
Adj. Rand Index Score: 0.294805.
Adj. Mutual Info Score: 0.394123.
Classes:  [0. 1. 2. 3. 4. 6.] n_clusters:  7.0
Seed:  1234
epoch train_loss valid_loss accuracy time
1 5.438473 5.382486 0.000977 00:00
2 5.417439 5.372610 0.002539 00:00
3 5.386519 5.351558 0.004883 00:00
4 5.343688 5.310810 0.032227 00:00
5 5.285374 5.235862 0.119727 00:00
6 5.210473 5.105072 0.234570 00:00
7 5.116263 4.874384 0.359766 00:00
8 4.999289 4.483549 0.429492 00:00
9 4.853271 3.910456 0.519922 00:00
10 4.667958 3.187985 0.540625 00:00
11 4.438830 2.444216 0.559766 00:00
12 4.178160 1.936840 0.607813 00:00
13 3.905120 1.637382 0.647656 00:00
14 3.640414 1.419299 0.683398 00:00
15 3.390141 1.261526 0.710547 00:00
16 3.155842 1.140978 0.734570 00:00
17 2.940582 1.015607 0.771680 00:00
18 2.743719 0.954670 0.771094 00:00
19 2.563874 0.902828 0.782422 00:00
20 2.402635 0.869242 0.793359 00:00
21 2.258146 0.871749 0.795898 00:00
22 2.127726 0.848889 0.783789 00:00
23 2.010966 0.840084 0.799023 00:00
24 1.904624 0.826150 0.794141 00:00
25 1.809020 0.822109 0.800195 00:00
26 1.723087 0.818189 0.801563 00:00
27 1.644708 0.815378 0.801172 00:00
28 1.572955 0.808805 0.801172 00:00
29 1.508170 0.809144 0.801172 00:00
30 1.449402 0.807901 0.799609 00:00
31 1.395845 0.808553 0.795898 00:00
32 1.346809 0.805604 0.800195 00:00
33 1.301498 0.805667 0.799805 00:00
34 1.260844 0.801534 0.801563 00:00
35 1.223333 0.803855 0.801172 00:00
36 1.188969 0.801589 0.799023 00:00
37 1.157363 0.800275 0.801172 00:00
38 1.128395 0.799552 0.800977 00:00
39 1.102524 0.800034 0.800977 00:00
40 1.077862 0.800371 0.799609 00:00
41 1.055149 0.799199 0.799414 00:00
42 1.034487 0.798135 0.800000 00:00
43 1.014956 0.796940 0.800781 00:00
44 0.998750 0.796775 0.801563 00:00
45 0.982787 0.796935 0.800781 00:00
46 0.967706 0.797409 0.799805 00:00
47 0.953863 0.797416 0.799805 00:00
48 0.941485 0.796850 0.800391 00:00
49 0.930310 0.796693 0.799805 00:00
50 0.919816 0.796753 0.799414 00:00
Paths:  567 , Tokens:  214
[30, 65, 91, 103, 131, 144, 161, 186, 205]
0
Adj. Rand Index Score: 0.294805.
Adj. Mutual Info Score: 0.394123.
Classes:  [0. 1. 2. 3. 4. 6.] n_clusters:  7.0
Seed:  43210
epoch train_loss valid_loss accuracy time
1 5.415949 5.372629 0.001563 00:00
2 5.387386 5.356244 0.001758 00:00
3 5.350883 5.326912 0.003711 00:00
4 5.300557 5.274119 0.052539 00:00
5 5.235893 5.180200 0.166602 00:00
6 5.154415 5.020784 0.235547 00:00
7 5.056523 4.765321 0.393945 00:00
8 4.934827 4.379003 0.467578 00:00
9 4.779448 3.792989 0.510156 00:00
10 4.584645 3.064451 0.558398 00:00
11 4.356037 2.421943 0.584766 00:00
12 4.101272 1.968108 0.612695 00:00
13 3.837967 1.625907 0.659766 00:00
14 3.577778 1.374960 0.694727 00:00
15 3.330235 1.201552 0.729492 00:00
16 3.099359 1.081877 0.746289 00:00
17 2.887284 0.988439 0.759375 00:00
18 2.693635 0.942499 0.771484 00:00
19 2.517783 0.906363 0.781055 00:00
20 2.359926 0.863814 0.798047 00:00
21 2.217594 0.854104 0.795508 00:00
22 2.089619 0.836234 0.789844 00:00
23 1.974926 0.825812 0.800000 00:00
24 1.871379 0.821260 0.793750 00:00
25 1.777496 0.815331 0.799609 00:00
26 1.693822 0.814534 0.800000 00:00
27 1.617368 0.807694 0.801172 00:00
28 1.547397 0.810921 0.800195 00:00
29 1.485135 0.806175 0.801172 00:00
30 1.427836 0.808170 0.801172 00:00
31 1.376165 0.805212 0.800781 00:00
32 1.328946 0.803677 0.800977 00:00
33 1.284955 0.804756 0.797070 00:00
34 1.245515 0.803555 0.800195 00:00
35 1.209459 0.802104 0.801367 00:00
36 1.176084 0.799711 0.800195 00:00
37 1.145829 0.799598 0.800977 00:00
38 1.118161 0.798446 0.800781 00:00
39 1.092801 0.796618 0.800586 00:00
40 1.068562 0.797521 0.800586 00:00
41 1.047298 0.795061 0.801172 00:00
42 1.027468 0.796186 0.800586 00:00
43 1.009327 0.794895 0.801563 00:00
44 0.992661 0.795217 0.801172 00:00
45 0.976971 0.794370 0.802539 00:00
46 0.962707 0.794178 0.802539 00:00
47 0.949888 0.794305 0.800781 00:00
48 0.937328 0.794549 0.800391 00:00
49 0.925604 0.794452 0.800586 00:00
50 0.915789 0.794068 0.800781 00:00
Paths:  567 , Tokens:  214
[30, 65, 91, 103, 131, 144, 161, 186, 205]
0
Adj. Rand Index Score: 0.294805.
Adj. Mutual Info Score: 0.394123.
Classes:  [0. 1. 2. 3. 4. 6.] n_clusters:  7.0
Seed:  1133557799
epoch train_loss valid_loss accuracy time
1 5.445707 5.404941 0.003516 00:00
2 5.419288 5.391619 0.010938 00:00
3 5.387238 5.367760 0.012695 00:00
4 5.342072 5.324619 0.030469 00:00
5 5.281643 5.246801 0.145313 00:00
6 5.206069 5.115916 0.249414 00:00
7 5.113408 4.890670 0.316211 00:00
8 4.995150 4.512926 0.462109 00:00
9 4.842824 3.923444 0.531445 00:00
10 4.649858 3.172139 0.589453 00:00
11 4.414536 2.453944 0.622852 00:00
12 4.146249 1.918891 0.638477 00:00
13 3.870106 1.577405 0.660547 00:00
14 3.599639 1.340352 0.708789 00:00
15 3.345223 1.189892 0.732031 00:00
16 3.109722 1.074925 0.745313 00:00
17 2.895240 0.977418 0.772266 00:00
18 2.700114 0.915190 0.785156 00:00
19 2.525062 0.882366 0.787109 00:00
20 2.365483 0.852865 0.797266 00:00
21 2.223419 0.837206 0.795312 00:00
22 2.094583 0.830641 0.800781 00:00
23 1.979034 0.825564 0.787891 00:00
24 1.874837 0.822311 0.790039 00:00
25 1.780612 0.819334 0.800195 00:00
26 1.695813 0.812046 0.794531 00:00
27 1.619022 0.807613 0.800391 00:00
28 1.549798 0.806643 0.800781 00:00
29 1.486589 0.805936 0.800000 00:00
30 1.428138 0.802629 0.800781 00:00
31 1.375066 0.803077 0.800781 00:00
32 1.326589 0.800481 0.801367 00:00
33 1.282177 0.800856 0.800977 00:00
34 1.242726 0.801639 0.800195 00:00
35 1.205885 0.800236 0.800391 00:00
36 1.173294 0.797816 0.800977 00:00
37 1.143243 0.798260 0.801172 00:00
38 1.114828 0.796272 0.801563 00:00
39 1.089547 0.797639 0.800977 00:00
40 1.065755 0.795402 0.801367 00:00
41 1.044314 0.794953 0.800391 00:00
42 1.024371 0.795076 0.801562 00:00
43 1.005339 0.795108 0.801172 00:00
44 0.988192 0.794587 0.800586 00:00
45 0.972780 0.794895 0.801367 00:00
46 0.958902 0.795103 0.801172 00:00
47 0.946486 0.794446 0.802148 00:00
48 0.934472 0.794027 0.801758 00:00
49 0.923011 0.794338 0.801172 00:00
50 0.912894 0.794236 0.801563 00:00
Paths:  567 , Tokens:  214
[30, 65, 91, 103, 131, 144, 161, 186, 205]
0
Adj. Rand Index Score: 0.294805.
Adj. Mutual Info Score: 0.394123.
Classes:  [0. 1. 2. 3. 4. 6.] n_clusters:  7.0
Seed:  22446688
epoch train_loss valid_loss accuracy time
1 5.432507 5.368482 0.002148 00:00
2 5.403538 5.354968 0.005859 00:00
3 5.364209 5.326710 0.009180 00:00
4 5.312761 5.274282 0.016406 00:00
5 5.246958 5.181679 0.090625 00:00
6 5.163605 5.027995 0.248828 00:00
7 5.062183 4.773528 0.367383 00:00
8 4.934366 4.358638 0.454883 00:00
9 4.772567 3.746915 0.532227 00:00
10 4.573374 3.000885 0.584961 00:00
11 4.335189 2.311109 0.617188 00:00
12 4.072542 1.834321 0.635742 00:00
13 3.800724 1.540442 0.660547 00:00
14 3.536475 1.321257 0.704297 00:00
15 3.290630 1.182073 0.730469 00:00
16 3.061923 1.064225 0.743359 00:00
17 2.853572 0.983672 0.774414 00:00
18 2.664521 0.942278 0.775000 00:00
19 2.492921 0.899311 0.781641 00:00
20 2.336987 0.875144 0.792969 00:00
21 2.198127 0.848582 0.798242 00:00
22 2.072954 0.840340 0.798438 00:00
23 1.960207 0.830013 0.791016 00:00
24 1.858200 0.823771 0.799219 00:00
25 1.766188 0.819789 0.796289 00:00
26 1.683505 0.813071 0.800586 00:00
27 1.607569 0.811530 0.800781 00:00
28 1.539718 0.809159 0.798828 00:00
29 1.477788 0.808614 0.800000 00:00
30 1.421047 0.808183 0.799609 00:00
31 1.368896 0.804954 0.800586 00:00
32 1.321630 0.803534 0.800977 00:00
33 1.278513 0.801797 0.800195 00:00
34 1.238955 0.800470 0.801758 00:00
35 1.202372 0.802020 0.800977 00:00
36 1.170228 0.799871 0.800977 00:00
37 1.140174 0.798878 0.800195 00:00
38 1.112306 0.798711 0.800977 00:00
39 1.087047 0.797429 0.801172 00:00
40 1.063341 0.797981 0.800781 00:00
41 1.042231 0.796601 0.801367 00:00
42 1.022938 0.796355 0.800977 00:00
43 1.004754 0.795888 0.800586 00:00
44 0.988155 0.794983 0.799805 00:00
45 0.972633 0.794999 0.800586 00:00
46 0.958317 0.795625 0.800977 00:00
47 0.945728 0.794926 0.801953 00:00
48 0.934246 0.795342 0.800000 00:00
49 0.923352 0.794570 0.800781 00:00
50 0.913195 0.796322 0.799609 00:00
Paths:  567 , Tokens:  214
[30, 65, 91, 103, 131, 144, 161, 186, 205]
0
Adj. Rand Index Score: 0.294805.
Adj. Mutual Info Score: 0.394123.
Classes:  [0. 1. 2. 3. 4. 6.] n_clusters:  7.0
Seed:  123456789
epoch train_loss valid_loss accuracy time
1 5.418828 5.368891 0.012891 00:00
2 5.390048 5.354771 0.018359 00:00
3 5.352821 5.327748 0.039453 00:00
4 5.300632 5.277109 0.079297 00:00
5 5.231560 5.187832 0.141797 00:00
6 5.144809 5.034313 0.261914 00:00
7 5.038313 4.778243 0.375781 00:00
8 4.907612 4.372129 0.455469 00:00
9 4.743863 3.780737 0.529102 00:00
10 4.542942 3.050423 0.575391 00:00
11 4.306057 2.362485 0.600195 00:00
12 4.042459 1.851825 0.626172 00:00
13 3.774402 1.533700 0.675391 00:00
14 3.512177 1.289260 0.713867 00:00
15 3.264300 1.159420 0.733203 00:00
16 3.035799 1.033432 0.755078 00:00
17 2.827238 0.980518 0.765234 00:00
18 2.637195 0.928460 0.766797 00:00
19 2.466940 0.905572 0.784766 00:00
20 2.312842 0.873604 0.776367 00:00
21 2.174833 0.843754 0.799219 00:00
22 2.050662 0.833562 0.784180 00:00
23 1.938625 0.822558 0.799219 00:00
24 1.837752 0.816449 0.800000 00:00
25 1.747066 0.811970 0.795508 00:00
26 1.665430 0.814990 0.798438 00:00
27 1.591843 0.813639 0.800977 00:00
28 1.524798 0.808399 0.801758 00:00
29 1.464110 0.809906 0.799609 00:00
30 1.408702 0.804273 0.799414 00:00
31 1.357911 0.802864 0.799805 00:00
32 1.311893 0.804022 0.800000 00:00
33 1.269561 0.800617 0.800000 00:00
34 1.231596 0.799093 0.800195 00:00
35 1.195944 0.800463 0.800391 00:00
36 1.163902 0.798366 0.800195 00:00
37 1.134108 0.798747 0.800781 00:00
38 1.107098 0.797464 0.801172 00:00
39 1.081676 0.796666 0.801367 00:00
40 1.059233 0.796176 0.800781 00:00
41 1.038508 0.795301 0.800977 00:00
42 1.018599 0.795221 0.800195 00:00
43 1.000575 0.794969 0.799219 00:00
44 0.984284 0.794186 0.800977 00:00
45 0.969127 0.794456 0.800977 00:00
46 0.955541 0.794302 0.800781 00:00
47 0.942721 0.794317 0.800781 00:00
48 0.931369 0.794775 0.800586 00:00
49 0.921002 0.793508 0.801367 00:00
50 0.911263 0.794619 0.800977 00:00
Paths:  567 , Tokens:  214
[30, 65, 91, 103, 131, 144, 161, 186, 205]
0
Adj. Rand Index Score: 0.294805.
Adj. Mutual Info Score: 0.394123.
Classes:  [0. 1. 2. 3. 4. 6.] n_clusters:  7.0
Seed:  987654321
epoch train_loss valid_loss accuracy time
1 5.397236 5.358831 0.018555 00:00
2 5.368382 5.345411 0.021875 00:00
3 5.330151 5.317958 0.037891 00:00
4 5.275702 5.264295 0.083398 00:00
5 5.207289 5.167618 0.198242 00:00
6 5.120462 5.002195 0.306055 00:00
7 5.014652 4.728115 0.387305 00:00
8 4.880757 4.292603 0.481250 00:00
9 4.711308 3.639076 0.542578 00:00
10 4.505122 2.885981 0.573633 00:00
11 4.262777 2.244725 0.599219 00:00
12 3.997744 1.833872 0.611133 00:00
13 3.729007 1.549649 0.647852 00:00
14 3.468719 1.317632 0.701758 00:00
15 3.227343 1.141816 0.735352 00:00
16 3.002196 1.028745 0.754102 00:00
17 2.796268 0.960370 0.779297 00:00
18 2.609006 0.908250 0.789844 00:00
19 2.440930 0.885023 0.794336 00:00
20 2.289992 0.851916 0.793164 00:00
21 2.153936 0.842921 0.790039 00:00
22 2.031816 0.845770 0.799805 00:00
23 1.922322 0.832217 0.778516 00:00
24 1.824376 0.822419 0.800586 00:00
25 1.735398 0.822433 0.801172 00:00
26 1.654452 0.818762 0.796484 00:00
27 1.581036 0.814686 0.800195 00:00
28 1.514118 0.815182 0.801562 00:00
29 1.453268 0.808061 0.802344 00:00
30 1.398583 0.809029 0.801172 00:00
31 1.348803 0.804467 0.800977 00:00
32 1.303195 0.802211 0.801172 00:00
33 1.261931 0.800696 0.801367 00:00
34 1.224258 0.798310 0.801953 00:00
35 1.189537 0.800376 0.801758 00:00
36 1.157730 0.800444 0.800391 00:00
37 1.128529 0.799244 0.801758 00:00
38 1.101480 0.798360 0.800781 00:00
39 1.077014 0.797033 0.801367 00:00
40 1.054189 0.796945 0.801172 00:00
41 1.033800 0.796797 0.800781 00:00
42 1.015005 0.796169 0.800781 00:00
43 0.997927 0.796221 0.800000 00:00
44 0.981602 0.796247 0.799805 00:00
45 0.967038 0.795532 0.800781 00:00
46 0.953310 0.794934 0.801172 00:00
47 0.941302 0.794185 0.801172 00:00
48 0.929382 0.795850 0.801172 00:00
49 0.919317 0.794937 0.801563 00:00
50 0.909220 0.794462 0.801367 00:00
Paths:  567 , Tokens:  214
[30, 65, 91, 103, 131, 144, 161, 186, 205]
0
Adj. Rand Index Score: 0.294805.
Adj. Mutual Info Score: 0.394123.
Classes:  [0. 1. 2. 3. 4. 6.] n_clusters:  7.0
Seed:  86420
epoch train_loss valid_loss accuracy time
1 5.407234 5.359961 0.014062 00:00
2 5.381419 5.347956 0.015430 00:00
3 5.347782 5.323798 0.026953 00:00
4 5.302121 5.277963 0.058594 00:00
5 5.243896 5.197117 0.175977 00:00
6 5.170649 5.058437 0.279102 00:00
7 5.080173 4.834910 0.368750 00:00
8 4.965621 4.476650 0.424219 00:00
9 4.820881 3.942787 0.462695 00:00
10 4.638288 3.250445 0.540820 00:00
11 4.414824 2.532573 0.585352 00:00
12 4.158218 1.973486 0.627734 00:00
13 3.889092 1.613008 0.657813 00:00
14 3.623730 1.371463 0.688086 00:00
15 3.369822 1.199490 0.729102 00:00
16 3.134225 1.074278 0.751953 00:00
17 2.917390 0.983997 0.761914 00:00
18 2.719469 0.952116 0.766211 00:00
19 2.542233 0.887394 0.788477 00:00
20 2.381347 0.863327 0.791797 00:00
21 2.236998 0.850426 0.779883 00:00
22 2.107556 0.830791 0.800391 00:00
23 1.992019 0.835173 0.798828 00:00
24 1.887831 0.829078 0.794727 00:00
25 1.793088 0.819388 0.800781 00:00
26 1.707583 0.811581 0.801172 00:00
27 1.630101 0.811885 0.800977 00:00
28 1.559145 0.810229 0.800391 00:00
29 1.494647 0.807598 0.800195 00:00
30 1.436920 0.806971 0.801367 00:00
31 1.385030 0.806527 0.799805 00:00
32 1.336386 0.804199 0.800391 00:00
33 1.292697 0.801871 0.800977 00:00
34 1.252198 0.804008 0.800977 00:00
35 1.215796 0.800028 0.801563 00:00
36 1.182603 0.800822 0.800586 00:00
37 1.151228 0.797123 0.801172 00:00
38 1.122481 0.796442 0.801367 00:00
39 1.096907 0.795317 0.801367 00:00
40 1.073471 0.795591 0.800781 00:00
41 1.051231 0.795421 0.800781 00:00
42 1.031033 0.795127 0.800977 00:00
43 1.012427 0.793929 0.801367 00:00
44 0.995373 0.794554 0.800000 00:00
45 0.979965 0.794341 0.800977 00:00
46 0.966091 0.793880 0.800195 00:00
47 0.952607 0.794075 0.800781 00:00
48 0.940047 0.793610 0.801172 00:00
49 0.928344 0.793673 0.800781 00:00
50 0.917680 0.793542 0.800977 00:00
Paths:  567 , Tokens:  214
[30, 65, 91, 103, 131, 144, 161, 186, 205]
0
Adj. Rand Index Score: 0.294805.
Adj. Mutual Info Score: 0.394123.
Classes:  [0. 1. 2. 3. 4. 6.] n_clusters:  7.0


File Name to Train:  data_sets/00_bunch/iris/iris.csv
#############################################################
HDBSCAN
#############################################################
Adj. Rand Index Score: 0.539409.
Adj. Mutual Info Score: 0.547691.
Classes:  [-1  0  1] n_clusters:  2
QSHIFTPP
#############################################################
Adj. Rand Index Score: 0.568116.
Adj. Mutual Info Score: 0.576771.
Classes:  [0 1] n_clusters:  2
HQSHIFT
#############################################################
Adj. Rand Index Score: 0.568116.
Adj. Mutual Info Score: 0.576771.
Classes:  [0 1] n_clusters:  2
Seed:  0
epoch train_loss valid_loss accuracy time
1 5.053304 4.988181 0.001563 00:00
2 5.025689 4.976361 0.006510 00:00
3 4.992795 4.954509 0.027344 00:00
4 4.943776 4.915385 0.126823 00:00
5 4.881144 4.849856 0.191927 00:00
6 4.805586 4.745351 0.274479 00:00
7 4.716297 4.583688 0.338281 00:00
8 4.610381 4.338599 0.405469 00:00
9 4.486489 3.985866 0.451302 00:00
10 4.339971 3.512516 0.541667 00:00
11 4.170335 2.932702 0.591406 00:00
12 3.979907 2.330271 0.618229 00:00
13 3.775810 1.874463 0.643229 00:00
14 3.563110 1.549128 0.671354 00:00
15 3.355114 1.331996 0.701302 00:00
16 3.156509 1.166981 0.727083 00:00
17 2.969527 1.040757 0.752604 00:00
18 2.795720 0.949988 0.777344 00:00
19 2.635508 0.874407 0.795313 00:00
20 2.486821 0.827717 0.803385 00:00
21 2.350394 0.793233 0.809375 00:00
22 2.226846 0.767423 0.811198 00:00
23 2.112904 0.754300 0.811719 00:00
24 2.007451 0.742568 0.812760 00:00
25 1.911797 0.737257 0.813021 00:00
26 1.824705 0.730928 0.812760 00:00
27 1.745300 0.726970 0.813542 00:00
28 1.672464 0.724646 0.814323 00:00
29 1.604390 0.722283 0.815104 00:00
30 1.542273 0.720453 0.814063 00:00
31 1.485147 0.721667 0.815104 00:00
32 1.432332 0.719225 0.815365 00:00
33 1.384411 0.717367 0.813021 00:00
34 1.339536 0.718429 0.815365 00:00
35 1.297866 0.715540 0.812500 00:00
36 1.258580 0.714755 0.814323 00:00
37 1.222965 0.715073 0.813021 00:00
38 1.188933 0.715561 0.814844 00:00
39 1.157756 0.713416 0.814063 00:00
40 1.128789 0.713249 0.815104 00:00
41 1.101854 0.713057 0.813021 00:00
42 1.076556 0.712578 0.813802 00:00
43 1.053882 0.712590 0.813802 00:00
44 1.031641 0.714457 0.813281 00:00
45 1.011611 0.711717 0.813542 00:00
46 0.993231 0.712175 0.814063 00:00
47 0.975510 0.711659 0.813542 00:00
48 0.959973 0.711150 0.814063 00:00
49 0.944979 0.712483 0.813542 00:00
50 0.931133 0.712051 0.813281 00:00
/home/can/anaconda3/envs/hqshift/lib/python3.6/site-packages/fastai/datasets.py:153: YAMLLoadWarning: calling yaml.load() without Loader=... is deprecated, as the default Loader is unsafe. Please read https://msg.pyyaml.org/load for full details.
  with open(fpath, 'r') as yaml_file: return yaml.load(yaml_file)
Paths:  318 , Tokens:  150
[8, 56, 79, 127]
0
Adj. Rand Index Score: 0.543752.
Adj. Mutual Info Score: 0.542515.
Classes:  [0. 1.] n_clusters:  2.0
Seed:  23
epoch train_loss valid_loss accuracy time
1 5.019341 4.996638 0.018229 00:00
2 4.997609 4.987558 0.044271 00:00
3 4.971472 4.970953 0.063802 00:00
4 4.936319 4.941093 0.102865 00:00
5 4.888903 4.891029 0.174740 00:00
6 4.829467 4.809386 0.291406 00:00
7 4.757714 4.678298 0.387500 00:00
8 4.670977 4.463720 0.445312 00:00
9 4.564978 4.132545 0.500521 00:00
10 4.436446 3.649446 0.538542 00:00
11 4.283046 3.050132 0.579167 00:00
12 4.104758 2.446071 0.605208 00:00
13 3.903278 1.959281 0.657552 00:00
14 3.690152 1.606320 0.692708 00:00
15 3.475927 1.358054 0.712760 00:00
16 3.269978 1.175423 0.741406 00:00
17 3.075555 1.049262 0.764583 00:00
18 2.892744 0.950978 0.782813 00:00
19 2.723670 0.891574 0.791406 00:00
20 2.569546 0.844049 0.802344 00:00
21 2.426132 0.811740 0.807552 00:00
22 2.295455 0.780904 0.810156 00:00
23 2.176633 0.771983 0.810938 00:00
24 2.067152 0.753593 0.811979 00:00
25 1.969306 0.754714 0.812240 00:00
26 1.878135 0.739963 0.814063 00:00
27 1.794144 0.735522 0.814323 00:00
28 1.717579 0.732907 0.813802 00:00
29 1.646252 0.727329 0.814583 00:00
30 1.581272 0.723949 0.814063 00:00
31 1.521919 0.720979 0.814323 00:00
32 1.467096 0.721284 0.814063 00:00
33 1.416345 0.720118 0.814583 00:00
34 1.369164 0.716588 0.814063 00:00
35 1.325877 0.716317 0.814063 00:00
36 1.285874 0.714047 0.813802 00:00
37 1.248637 0.712475 0.813542 00:00
38 1.214119 0.712855 0.813802 00:00
39 1.181768 0.712797 0.814323 00:00
40 1.151778 0.713222 0.815365 00:00
41 1.123651 0.712171 0.815365 00:00
42 1.096974 0.710996 0.814844 00:00
43 1.072296 0.712242 0.813802 00:00
44 1.049550 0.710749 0.812760 00:00
45 1.028424 0.711791 0.813281 00:00
46 1.008509 0.711610 0.814063 00:00
47 0.990224 0.711412 0.813802 00:00
48 0.971730 0.711370 0.813802 00:00
49 0.955287 0.711508 0.813542 00:00
50 0.939903 0.710920 0.814583 00:00
Paths:  318 , Tokens:  150
[8, 56, 79, 127]
0
Adj. Rand Index Score: 0.543752.
Adj. Mutual Info Score: 0.542515.
Classes:  [0. 1.] n_clusters:  2.0
Seed:  42
epoch train_loss valid_loss accuracy time
1 5.080227 5.022364 0.024219 00:00
2 5.057003 5.011244 0.027083 00:00
3 5.025050 4.990326 0.041927 00:00
4 4.980808 4.952816 0.048958 00:00
5 4.920654 4.889297 0.090365 00:00
6 4.850921 4.789301 0.179948 00:00
7 4.769736 4.631716 0.294531 00:00
8 4.674830 4.396720 0.410938 00:00
9 4.560826 4.047606 0.509896 00:00
10 4.422939 3.553832 0.539844 00:00
11 4.258340 2.951153 0.563281 00:00
12 4.069800 2.348383 0.597396 00:00
13 3.865042 1.893263 0.631250 00:00
14 3.654904 1.585672 0.674219 00:00
15 3.441931 1.350382 0.709896 00:00
16 3.238689 1.181249 0.736719 00:00
17 3.045974 1.047996 0.756250 00:00
18 2.865608 0.954951 0.775521 00:00
19 2.698042 0.877726 0.784635 00:00
20 2.544532 0.831887 0.797917 00:00
21 2.403277 0.803505 0.805469 00:00
22 2.274712 0.776508 0.806510 00:00
23 2.156538 0.760842 0.812760 00:00
24 2.048886 0.751240 0.813021 00:00
25 1.950785 0.742246 0.813021 00:00
26 1.861506 0.733962 0.814844 00:00
27 1.779399 0.728757 0.813542 00:00
28 1.704007 0.726900 0.814844 00:00
29 1.634964 0.723051 0.814323 00:00
30 1.571654 0.724257 0.813542 00:00
31 1.512591 0.721041 0.814583 00:00
32 1.457596 0.719540 0.814323 00:00
33 1.407585 0.718162 0.815365 00:00
34 1.361499 0.718239 0.814323 00:00
35 1.319011 0.718996 0.815365 00:00
36 1.278934 0.720189 0.815365 00:00
37 1.241441 0.718139 0.814323 00:00
38 1.206758 0.717821 0.814844 00:00
39 1.174752 0.715610 0.815625 00:00
40 1.144594 0.714651 0.814583 00:00
41 1.116919 0.715198 0.814844 00:00
42 1.091714 0.714607 0.815625 00:00
43 1.067560 0.715374 0.814323 00:00
44 1.045173 0.712965 0.816146 00:00
45 1.024142 0.712713 0.815365 00:00
46 1.004097 0.713506 0.815625 00:00
47 0.985308 0.712885 0.815885 00:00
48 0.968102 0.712279 0.814583 00:00
49 0.952933 0.714828 0.815104 00:00
50 0.938205 0.713824 0.815104 00:00
Paths:  318 , Tokens:  150
[8, 56, 79, 127]
0
Adj. Rand Index Score: 0.543752.
Adj. Mutual Info Score: 0.542515.
Classes:  [0. 1.] n_clusters:  2.0
Seed:  1234
epoch train_loss valid_loss accuracy time
1 5.042325 5.047029 0.001302 00:00
2 5.022120 5.036441 0.002344 00:00
3 4.996775 5.017320 0.009896 00:00
4 4.959430 4.984974 0.024740 00:00
5 4.912694 4.930768 0.081250 00:00
6 4.853024 4.843086 0.186198 00:00
7 4.780111 4.706603 0.327083 00:00
8 4.690701 4.487967 0.429688 00:00
9 4.581708 4.150346 0.473438 00:00
10 4.448578 3.657698 0.518490 00:00
11 4.288664 3.045215 0.566406 00:00
12 4.100779 2.403355 0.621354 00:00
13 3.891055 1.891268 0.648177 00:00
14 3.673012 1.549185 0.682813 00:00
15 3.456430 1.316680 0.722396 00:00
16 3.246726 1.146336 0.750000 00:00
17 3.050423 1.018105 0.767708 00:00
18 2.868073 0.922228 0.783333 00:00
19 2.700185 0.859596 0.793490 00:00
20 2.546235 0.817470 0.806510 00:00
21 2.404366 0.793236 0.806771 00:00
22 2.274653 0.768772 0.793490 00:00
23 2.156503 0.752551 0.812500 00:00
24 2.048486 0.742114 0.813542 00:00
25 1.950352 0.738362 0.811979 00:00
26 1.860897 0.729546 0.814583 00:00
27 1.778411 0.728334 0.814063 00:00
28 1.702261 0.723560 0.814063 00:00
29 1.632258 0.720694 0.815885 00:00
30 1.568438 0.720170 0.813542 00:00
31 1.509416 0.719806 0.814583 00:00
32 1.456276 0.718816 0.814583 00:00
33 1.405846 0.716525 0.814844 00:00
34 1.359496 0.716177 0.814323 00:00
35 1.316107 0.717262 0.814323 00:00
36 1.276577 0.715408 0.814844 00:00
37 1.238863 0.714868 0.816146 00:00
38 1.205547 0.713977 0.815104 00:00
39 1.173715 0.713956 0.815365 00:00
40 1.143818 0.713564 0.814583 00:00
41 1.116458 0.714040 0.814063 00:00
42 1.090387 0.712152 0.815625 00:00
43 1.065840 0.712252 0.814323 00:00
44 1.043957 0.711886 0.814844 00:00
45 1.023331 0.712977 0.814583 00:00
46 1.003504 0.711233 0.813542 00:00
47 0.985069 0.710909 0.814063 00:00
48 0.968074 0.710740 0.814323 00:00
49 0.952053 0.711466 0.813802 00:00
50 0.936766 0.711831 0.813542 00:00
Paths:  318 , Tokens:  150
[8, 56, 79, 127]
0
Adj. Rand Index Score: 0.543752.
Adj. Mutual Info Score: 0.542515.
Classes:  [0. 1.] n_clusters:  2.0
Seed:  43210
epoch train_loss valid_loss accuracy time
1 5.071328 4.992609 0.018229 00:00
2 5.043644 4.982543 0.034375 00:00
3 5.010444 4.964072 0.054948 00:00
4 4.965611 4.930330 0.076563 00:00
5 4.906096 4.873090 0.163021 00:00
6 4.831231 4.780248 0.275781 00:00
7 4.741326 4.633849 0.361979 00:00
8 4.635673 4.401793 0.394792 00:00
9 4.510111 4.041659 0.459115 00:00
10 4.365275 3.539735 0.508854 00:00
11 4.197216 2.950337 0.567448 00:00
12 4.005839 2.376667 0.617708 00:00
13 3.799995 1.897111 0.638542 00:00
14 3.587721 1.544759 0.670573 00:00
15 3.375922 1.306261 0.698958 00:00
16 3.172076 1.136634 0.742188 00:00
17 2.980874 1.020811 0.760677 00:00
18 2.802851 0.934059 0.776563 00:00
19 2.638977 0.868563 0.793750 00:00
20 2.488060 0.817468 0.802865 00:00
21 2.350771 0.782762 0.811979 00:00
22 2.226339 0.768921 0.813802 00:00
23 2.110989 0.753730 0.814323 00:00
24 2.005932 0.741208 0.813802 00:00
25 1.910660 0.737915 0.814323 00:00
26 1.823836 0.729591 0.814323 00:00
27 1.744348 0.727745 0.816146 00:00
28 1.672214 0.726062 0.815104 00:00
29 1.604780 0.718714 0.814583 00:00
30 1.542345 0.718500 0.815365 00:00
31 1.484714 0.715432 0.815625 00:00
32 1.431729 0.717795 0.814063 00:00
33 1.383804 0.715671 0.815365 00:00
34 1.338941 0.714684 0.814063 00:00
35 1.296706 0.713797 0.814063 00:00
36 1.258308 0.713506 0.815365 00:00
37 1.222434 0.713756 0.815104 00:00
38 1.189285 0.713101 0.814583 00:00
39 1.158855 0.714123 0.815104 00:00
40 1.130809 0.713216 0.815104 00:00
41 1.103926 0.711621 0.815365 00:00
42 1.079272 0.711747 0.815365 00:00
43 1.056297 0.711242 0.815365 00:00
44 1.033900 0.711491 0.814323 00:00
45 1.013398 0.710513 0.815625 00:00
46 0.994760 0.710149 0.815365 00:00
47 0.977171 0.711718 0.814583 00:00
48 0.960160 0.710829 0.813802 00:00
49 0.945268 0.710125 0.814844 00:00
50 0.930707 0.712111 0.814063 00:00
Paths:  318 , Tokens:  150
[8, 56, 79, 127]
0
Adj. Rand Index Score: 0.543752.
Adj. Mutual Info Score: 0.542515.
Classes:  [0. 1.] n_clusters:  2.0
Seed:  1133557799
epoch train_loss valid_loss accuracy time
1 5.087507 5.045917 0.004948 00:00
2 5.062593 5.035278 0.009635 00:00
3 5.029911 5.015891 0.013281 00:00
4 4.990533 4.981647 0.049219 00:00
5 4.936895 4.925339 0.078385 00:00
6 4.870072 4.835527 0.139323 00:00
7 4.789662 4.698284 0.261458 00:00
8 4.693760 4.488612 0.394792 00:00
9 4.578603 4.170987 0.498177 00:00
10 4.439440 3.701710 0.571615 00:00
11 4.275474 3.074589 0.606510 00:00
12 4.086166 2.435566 0.618490 00:00
13 3.876294 1.919376 0.653125 00:00
14 3.658365 1.575526 0.678906 00:00
15 3.444104 1.331026 0.704167 00:00
16 3.236078 1.157221 0.741406 00:00
17 3.041204 1.036469 0.759115 00:00
18 2.861330 0.940342 0.781250 00:00
19 2.695106 0.874925 0.793229 00:00
20 2.541441 0.828539 0.793229 00:00
21 2.402561 0.794235 0.810156 00:00
22 2.273550 0.768929 0.811458 00:00
23 2.155581 0.762280 0.803125 00:00
24 2.049151 0.747793 0.812240 00:00
25 1.949112 0.741491 0.813802 00:00
26 1.859360 0.733996 0.813542 00:00
27 1.777787 0.728658 0.815104 00:00
28 1.701930 0.729377 0.814583 00:00
29 1.631600 0.725585 0.814063 00:00
30 1.567907 0.721479 0.814063 00:00
31 1.509243 0.720281 0.814583 00:00
32 1.454885 0.721088 0.813802 00:00
33 1.405090 0.720450 0.814323 00:00
34 1.358923 0.716172 0.815104 00:00
35 1.316465 0.716573 0.815365 00:00
36 1.276987 0.716664 0.813802 00:00
37 1.240786 0.714286 0.814323 00:00
38 1.206641 0.714445 0.813802 00:00
39 1.175316 0.713684 0.814323 00:00
40 1.145060 0.713118 0.814583 00:00
41 1.117517 0.710906 0.816927 00:00
42 1.090879 0.713578 0.814323 00:00
43 1.066236 0.712237 0.814583 00:00
44 1.043818 0.711371 0.815365 00:00
45 1.023188 0.710751 0.815365 00:00
46 1.004117 0.711486 0.814323 00:00
47 0.985909 0.711122 0.813802 00:00
48 0.968562 0.710343 0.815885 00:00
49 0.952481 0.711430 0.815365 00:00
50 0.938257 0.711884 0.814844 00:00
Paths:  318 , Tokens:  150
[8, 56, 79, 127]
0
Adj. Rand Index Score: 0.543752.
Adj. Mutual Info Score: 0.542515.
Classes:  [0. 1.] n_clusters:  2.0
Seed:  22446688
epoch train_loss valid_loss accuracy time
1 5.084162 4.978789 0.002083 00:00
2 5.061502 4.972279 0.001302 00:00
3 5.033481 4.957591 0.009896 00:00
4 4.991902 4.930260 0.039583 00:00
5 4.938540 4.881417 0.144792 00:00
6 4.874043 4.800850 0.217448 00:00
7 4.796447 4.670933 0.311979 00:00
8 4.706205 4.469481 0.390625 00:00
9 4.598732 4.152863 0.461198 00:00
10 4.471431 3.690967 0.541146 00:00
11 4.320179 3.121761 0.610677 00:00
12 4.142712 2.533847 0.643490 00:00
13 3.942793 2.017585 0.662500 00:00
14 3.731364 1.650611 0.682031 00:00
15 3.517501 1.417474 0.707552 00:00
16 3.311614 1.233590 0.723958 00:00
17 3.118135 1.104095 0.745833 00:00
18 2.936595 0.998106 0.764583 00:00
19 2.768571 0.923884 0.776042 00:00
20 2.611956 0.868402 0.790625 00:00
21 2.467072 0.822884 0.804948 00:00
22 2.335667 0.791208 0.809375 00:00
23 2.214746 0.773239 0.811719 00:00
24 2.103855 0.756911 0.812500 00:00
25 2.002645 0.749180 0.811979 00:00
26 1.909560 0.739921 0.811979 00:00
27 1.824234 0.734665 0.814323 00:00
28 1.746520 0.728702 0.813802 00:00
29 1.674873 0.725522 0.814844 00:00
30 1.608321 0.722637 0.815625 00:00
31 1.546618 0.719389 0.813802 00:00
32 1.489218 0.719761 0.814583 00:00
33 1.436940 0.718789 0.813802 00:00
34 1.388551 0.718366 0.811979 00:00
35 1.343403 0.715756 0.813281 00:00
36 1.301745 0.714715 0.811458 00:00
37 1.263082 0.714210 0.812500 00:00
38 1.227539 0.714555 0.813542 00:00
39 1.193965 0.712680 0.815104 00:00
40 1.162813 0.711533 0.814583 00:00
41 1.133311 0.713129 0.814063 00:00
42 1.106414 0.711418 0.816146 00:00
43 1.081423 0.711828 0.813802 00:00
44 1.058762 0.712957 0.814063 00:00
45 1.036988 0.712601 0.814063 00:00
46 1.016708 0.713975 0.815104 00:00
47 0.997987 0.711145 0.814063 00:00
48 0.980055 0.712512 0.814844 00:00
49 0.962953 0.712250 0.814323 00:00
50 0.947530 0.711976 0.815104 00:00
Paths:  318 , Tokens:  150
[8, 56, 79, 127]
0
Adj. Rand Index Score: 0.543752.
Adj. Mutual Info Score: 0.542515.
Classes:  [0. 1.] n_clusters:  2.0
Seed:  123456789
epoch train_loss valid_loss accuracy time
1 5.080513 5.039024 0.000781 00:00
2 5.065538 5.028779 0.001042 00:00
3 5.039251 5.010608 0.008333 00:00
4 5.002638 4.979975 0.027865 00:00
5 4.955206 4.929317 0.055729 00:00
6 4.895703 4.848574 0.178125 00:00
7 4.825160 4.719636 0.247917 00:00
8 4.738688 4.518165 0.367969 00:00
9 4.632381 4.211682 0.456510 00:00
10 4.504340 3.746900 0.508854 00:00
11 4.347709 3.156683 0.578906 00:00
12 4.164800 2.508336 0.595313 00:00
13 3.959991 1.999943 0.642188 00:00
14 3.747312 1.666628 0.668490 00:00
15 3.533387 1.405532 0.697396 00:00
16 3.326022 1.220334 0.729688 00:00
17 3.129385 1.076680 0.756250 00:00
18 2.943322 0.969913 0.776563 00:00
19 2.772202 0.890065 0.793229 00:00
20 2.612848 0.838147 0.801823 00:00
21 2.466713 0.801093 0.807552 00:00
22 2.332352 0.784425 0.808333 00:00
23 2.209010 0.756983 0.812760 00:00
24 2.097059 0.748420 0.814063 00:00
25 1.995533 0.739219 0.814583 00:00
26 1.902775 0.736002 0.813802 00:00
27 1.816741 0.728255 0.813281 00:00
28 1.739029 0.732671 0.813021 00:00
29 1.667408 0.723062 0.814844 00:00
30 1.600975 0.722526 0.814323 00:00
31 1.539936 0.723015 0.814063 00:00
32 1.483595 0.720027 0.813281 00:00
33 1.430766 0.718962 0.813542 00:00
34 1.382657 0.716166 0.814583 00:00
35 1.338040 0.716900 0.813802 00:00
36 1.297189 0.715151 0.814844 00:00
37 1.258357 0.713379 0.815365 00:00
38 1.222637 0.714045 0.814323 00:00
39 1.189103 0.714204 0.814583 00:00
40 1.158158 0.714074 0.814323 00:00
41 1.129411 0.713255 0.815365 00:00
42 1.102571 0.712319 0.814063 00:00
43 1.077477 0.711571 0.815104 00:00
44 1.054465 0.711778 0.815365 00:00
45 1.032535 0.711855 0.814583 00:00
46 1.012852 0.711828 0.814063 00:00
47 0.994391 0.711362 0.814844 00:00
48 0.976085 0.710760 0.814844 00:00
49 0.960073 0.711206 0.815104 00:00
50 0.944573 0.711677 0.814844 00:00
Paths:  318 , Tokens:  150
[8, 56, 79, 127]
Adj. Rand Index Score: 0.543752.
Adj. Mutual Info Score: 0.542515.
Classes:  [0. 1.] n_clusters:  2.0
Seed:  987654321
epoch train_loss valid_loss accuracy time
1 5.051621 5.004432 0.004687 00:00
2 5.023418 4.991358 0.009635 00:00
3 4.985503 4.969084 0.036458 00:00
4 4.934233 4.930178 0.075260 00:00
5 4.867278 4.865328 0.201302 00:00
6 4.784426 4.756937 0.305208 00:00
7 4.684152 4.584362 0.400521 00:00
8 4.563793 4.313743 0.436198 00:00
9 4.423436 3.908403 0.480990 00:00
10 4.259247 3.353637 0.535938 00:00
11 4.073395 2.738281 0.562760 00:00
12 3.870864 2.171305 0.616927 00:00
13 3.662017 1.744116 0.670833 00:00
14 3.451260 1.457988 0.689844 00:00
15 3.247507 1.249424 0.721615 00:00
16 3.054935 1.107257 0.745833 00:00
17 2.873478 0.996656 0.764844 00:00
18 2.705188 0.916174 0.787500 00:00
19 2.548430 0.857639 0.796354 00:00
20 2.405813 0.815813 0.806250 00:00
21 2.274655 0.781133 0.810156 00:00
22 2.155682 0.758860 0.811979 00:00
23 2.046138 0.749929 0.812760 00:00
24 1.945396 0.738105 0.813802 00:00
25 1.854796 0.733959 0.813542 00:00
26 1.771554 0.734475 0.813802 00:00
27 1.695548 0.726979 0.815104 00:00
28 1.624893 0.724410 0.815104 00:00
29 1.560168 0.721679 0.814323 00:00
30 1.501777 0.721409 0.814063 00:00
31 1.447325 0.719241 0.814063 00:00
32 1.397539 0.716861 0.814583 00:00
33 1.351385 0.715343 0.815625 00:00
34 1.308791 0.715934 0.814323 00:00
35 1.269576 0.715166 0.814063 00:00
36 1.233084 0.714510 0.814323 00:00
37 1.198578 0.713738 0.815104 00:00
38 1.166261 0.713647 0.816146 00:00
39 1.137509 0.713428 0.814323 00:00
40 1.110412 0.712968 0.814844 00:00
41 1.084492 0.713432 0.814323 00:00
42 1.060961 0.712452 0.814323 00:00
43 1.038633 0.710973 0.815104 00:00
44 1.017713 0.711481 0.816667 00:00
45 0.998368 0.711188 0.816146 00:00
46 0.980284 0.712332 0.815885 00:00
47 0.964079 0.711588 0.814583 00:00
48 0.947888 0.709864 0.816146 00:00
49 0.932831 0.711388 0.813542 00:00
50 0.919292 0.710636 0.815104 00:00
Paths:  318 , Tokens:  150
[8, 56, 79, 127]
0
Adj. Rand Index Score: 0.543752.
Adj. Mutual Info Score: 0.542515.
Classes:  [0. 1.] n_clusters:  2.0
Seed:  86420
epoch train_loss valid_loss accuracy time
1 5.062465 5.034970 0.002604 00:00
2 5.041939 5.024042 0.002344 00:00
3 5.009276 5.005023 0.003385 00:00
4 4.967320 4.971904 0.018229 00:00
5 4.912605 4.916266 0.066146 00:00
6 4.841670 4.824973 0.220833 00:00
7 4.756890 4.676383 0.304688 00:00
8 4.655982 4.437275 0.386198 00:00
9 4.534377 4.064283 0.423698 00:00
10 4.391213 3.547959 0.513802 00:00
11 4.222950 2.917741 0.563802 00:00
12 4.030290 2.311091 0.606510 00:00
13 3.820435 1.846185 0.650521 00:00
14 3.604205 1.524963 0.686979 00:00
15 3.392763 1.301613 0.716927 00:00
16 3.186919 1.136265 0.753125 00:00
17 2.994414 1.010122 0.766927 00:00
18 2.815772 0.928209 0.780469 00:00
19 2.650338 0.865135 0.797656 00:00
20 2.500581 0.819734 0.801302 00:00
21 2.362325 0.793434 0.807813 00:00
22 2.237303 0.769273 0.810156 00:00
23 2.122966 0.755138 0.812500 00:00
24 2.018163 0.747379 0.811719 00:00
25 1.922473 0.740426 0.812500 00:00
26 1.835114 0.735446 0.812500 00:00
27 1.754380 0.731640 0.814323 00:00
28 1.680462 0.726926 0.813802 00:00
29 1.612873 0.723810 0.814063 00:00
30 1.549685 0.721998 0.814844 00:00
31 1.491369 0.720166 0.815104 00:00
32 1.438699 0.721309 0.813542 00:00
33 1.389183 0.719433 0.813542 00:00
34 1.343577 0.720069 0.814063 00:00
35 1.302128 0.716098 0.814583 00:00
36 1.262870 0.716497 0.815104 00:00
37 1.226923 0.716386 0.813542 00:00
38 1.193711 0.714971 0.813542 00:00
39 1.161656 0.714142 0.813802 00:00
40 1.132985 0.713628 0.815885 00:00
41 1.105423 0.714364 0.813281 00:00
42 1.080527 0.714796 0.815104 00:00
43 1.056852 0.713363 0.814583 00:00
44 1.035674 0.712588 0.815104 00:00
45 1.014993 0.712605 0.814323 00:00
46 0.996310 0.712647 0.814323 00:00
47 0.978697 0.712741 0.813802 00:00
48 0.961760 0.712424 0.814844 00:00
49 0.945440 0.712350 0.813802 00:00
50 0.930910 0.711798 0.813802 00:00
Paths:  318 , Tokens:  150
[8, 56, 79, 127]
0
Adj. Rand Index Score: 0.543752.
Adj. Mutual Info Score: 0.542515.
Classes:  [0. 1.] n_clusters:  2.0


File Name to Train:  data_sets/00_bunch/mnist/mnist.csv
#############################################################
HDBSCAN
#############################################################
Adj. Rand Index Score: 0.751959.
Adj. Mutual Info Score: 0.811806.
Classes:  [0 1 2 3 4 5 6 7 8] n_clusters:  9
QSHIFTPP
#############################################################
Adj. Rand Index Score: 0.815115.
Adj. Mutual Info Score: 0.838626.
Classes:  [ 0  1  2  3  4  5  6  7  8  9 10 11] n_clusters:  12
HQSHIFT
#############################################################
Adj. Rand Index Score: 0.751959.
Adj. Mutual Info Score: 0.811806.
Classes:  [0 1 2 3 4 5 6 7 8] n_clusters:  9
Seed:  0
epoch train_loss valid_loss accuracy time
1 7.139834 6.856246 0.157801 00:00
2 6.495890 5.880241 0.386316 00:00
3 5.332153 4.197858 0.536340 00:00
4 3.617595 2.325981 0.667140 00:00
5 2.214389 1.379493 0.783842 00:00
6 1.388684 0.919938 0.842874 00:00
7 0.941080 0.681407 0.881380 00:00
8 0.704067 0.563072 0.897052 00:00
9 0.588664 0.520100 0.906475 00:00
10 0.534158 0.503652 0.905374 00:00
11 0.512362 0.505147 0.909470 00:00
12 0.502439 0.498852 0.909387 00:00
13 0.498642 0.497989 0.903516 00:00
14 0.494438 0.489860 0.909541 00:00
15 0.489800 0.490640 0.908771 00:00
16 0.492037 0.494470 0.907315 00:00
17 0.490121 0.486527 0.908180 00:00
18 0.490851 0.488768 0.909659 00:00
19 0.488004 0.484760 0.908499 00:00
20 0.485410 0.485034 0.909659 00:00
21 0.484778 0.483784 0.909612 00:00
22 0.481594 0.483889 0.905682 00:00
23 0.482933 0.485561 0.909612 00:00
24 0.481511 0.479609 0.909612 00:00
25 0.480964 0.482648 0.909564 00:00
26 0.479145 0.481213 0.909576 00:00
27 0.479743 0.481316 0.905895 00:00
28 0.478331 0.480465 0.909612 00:00
29 0.479267 0.478104 0.909612 00:00
30 0.480463 0.480718 0.909588 00:00
31 0.479758 0.478105 0.909564 00:00
32 0.477829 0.476765 0.909659 00:00
33 0.477735 0.476978 0.909612 00:00
34 0.476296 0.478712 0.909600 00:00
35 0.476776 0.477263 0.907517 00:00
36 0.476390 0.475721 0.909647 00:00
37 0.474642 0.475749 0.909600 00:00
38 0.475441 0.474796 0.909588 00:00
39 0.473267 0.474929 0.909600 00:00
40 0.473675 0.474736 0.909612 00:00
41 0.474492 0.474019 0.909612 00:00
42 0.472443 0.474091 0.909588 00:00
43 0.472998 0.474064 0.909600 00:00
44 0.470832 0.473647 0.909647 00:00
45 0.473936 0.473568 0.909612 00:00
46 0.472841 0.473556 0.909588 00:00
47 0.470716 0.473463 0.909635 00:00
48 0.473756 0.473491 0.909588 00:00
49 0.473536 0.473477 0.909624 00:00
50 0.472249 0.473510 0.909647 00:00
/home/can/anaconda3/envs/hqshift/lib/python3.6/site-packages/fastai/datasets.py:153: YAMLLoadWarning: calling yaml.load() without Loader=... is deprecated, as the default Loader is unsafe. Please read https://msg.pyyaml.org/load for full details.
  with open(fpath, 'r') as yaml_file: return yaml.load(yaml_file)
Paths:  4685 , Tokens:  1797
[167, 352, 1076, 1457, 1505, 1670, 1741, 1762, 1764]
0
0
Adj. Rand Index Score: 0.770185.
Adj. Mutual Info Score: 0.826536.
Classes:  [0. 1. 2. 3. 4. 5. 6. 7. 8.] n_clusters:  9.0
Seed:  23
epoch train_loss valid_loss accuracy time
1 7.262473 7.039093 0.118963 00:00
2 6.720097 6.198088 0.369957 00:00
3 5.655553 4.538759 0.550225 00:00
4 3.883855 2.475403 0.648923 00:00
5 2.363581 1.459030 0.773745 00:00
6 1.456707 0.951614 0.839145 00:00
7 0.969005 0.686519 0.882351 00:00
8 0.714755 0.574102 0.896058 00:00
9 0.591427 0.526857 0.900095 00:00
10 0.532128 0.512261 0.907008 00:00
11 0.510479 0.498501 0.909399 00:00
12 0.500510 0.498322 0.905563 00:00
13 0.497355 0.488452 0.909470 00:00
14 0.496399 0.497573 0.906972 00:00
15 0.493976 0.490543 0.909576 00:00
16 0.489823 0.486393 0.908819 00:00
17 0.490957 0.491240 0.906984 00:00
18 0.487458 0.483669 0.909576 00:00
19 0.485854 0.487333 0.909588 00:00
20 0.486640 0.486980 0.908925 00:00
21 0.484153 0.482609 0.909505 00:00
22 0.483531 0.478937 0.908404 00:00
23 0.482336 0.481828 0.909624 00:00
24 0.481330 0.479362 0.909612 00:00
25 0.478200 0.478646 0.909600 00:00
26 0.478573 0.482239 0.909612 00:00
27 0.478610 0.478790 0.909624 00:00
28 0.478344 0.478154 0.909576 00:00
29 0.477875 0.479084 0.909600 00:00
30 0.477203 0.477124 0.909564 00:00
31 0.478529 0.478250 0.909624 00:00
32 0.477337 0.477212 0.909588 00:00
33 0.475274 0.476131 0.909553 00:00
34 0.474690 0.475484 0.909612 00:00
35 0.476066 0.476205 0.909576 00:00
36 0.474963 0.475565 0.909588 00:00
37 0.474402 0.475218 0.909600 00:00
38 0.474527 0.474698 0.909600 00:00
39 0.473419 0.475215 0.909553 00:00
40 0.474944 0.474303 0.909612 00:00
41 0.474215 0.474101 0.909600 00:00
42 0.476482 0.473975 0.909612 00:00
43 0.474626 0.473904 0.909612 00:00
44 0.473840 0.473713 0.909612 00:00
45 0.473789 0.473644 0.909588 00:00
46 0.474323 0.473460 0.909612 00:00
47 0.471834 0.473436 0.909576 00:00
48 0.471923 0.473423 0.909553 00:00
49 0.472616 0.473414 0.909600 00:00
50 0.471968 0.473415 0.909588 00:00
Paths:  4685 , Tokens:  1797
[167, 352, 1076, 1457, 1505, 1670, 1741, 1762, 1764]
0
Adj. Rand Index Score: 0.768024.
Adj. Mutual Info Score: 0.823528.
Classes:  [0. 1. 2. 3. 4. 5. 6. 7. 8.] n_clusters:  9.0
Seed:  42
epoch train_loss valid_loss accuracy time
1 7.237183 7.008329 0.092448 00:00
2 6.689783 6.174602 0.318987 00:00
3 5.623013 4.502342 0.511174 00:00
4 3.887360 2.514932 0.645147 00:00
5 2.375195 1.462777 0.770691 00:00
6 1.462708 0.941634 0.841170 00:00
7 0.968798 0.694985 0.879545 00:00
8 0.714658 0.563596 0.901184 00:00
9 0.589071 0.524884 0.906984 00:00
10 0.532819 0.505758 0.906830 00:00
11 0.510661 0.505767 0.902794 00:00
12 0.502772 0.500097 0.904652 00:00
13 0.497606 0.498057 0.909564 00:00
14 0.495466 0.491697 0.908345 00:00
15 0.492932 0.490789 0.907860 00:00
16 0.493489 0.495939 0.905575 00:00
17 0.490989 0.489260 0.909576 00:00
18 0.490066 0.489171 0.909588 00:00
19 0.487431 0.486174 0.909541 00:00
20 0.487017 0.488200 0.909576 00:00
21 0.486993 0.486831 0.909541 00:00
22 0.485170 0.489099 0.905646 00:00
23 0.483590 0.484716 0.909565 00:00
24 0.482284 0.484157 0.908688 00:00
25 0.482149 0.480686 0.909612 00:00
26 0.482347 0.479204 0.909635 00:00
27 0.480474 0.478702 0.909565 00:00
28 0.480784 0.480063 0.909612 00:00
29 0.479200 0.478441 0.909564 00:00
30 0.479817 0.479259 0.909588 00:00
31 0.477221 0.478664 0.907031 00:00
32 0.477068 0.477983 0.909553 00:00
33 0.476993 0.477829 0.907315 00:00
34 0.479856 0.478425 0.909505 00:00
35 0.477660 0.476007 0.909564 00:00
36 0.478040 0.475963 0.909647 00:00
37 0.476770 0.477255 0.909612 00:00
38 0.478243 0.475564 0.909635 00:00
39 0.475494 0.474692 0.909659 00:00
40 0.475458 0.474561 0.909576 00:00
41 0.476398 0.474634 0.909635 00:00
42 0.473488 0.474357 0.909600 00:00
43 0.473692 0.474145 0.909576 00:00
44 0.472766 0.474005 0.909600 00:00
45 0.473932 0.473798 0.909600 00:00
46 0.472268 0.473732 0.909647 00:00
47 0.472926 0.473672 0.909671 00:00
48 0.474104 0.473675 0.909624 00:00
49 0.473660 0.473656 0.909624 00:00
50 0.473433 0.473593 0.909659 00:00
Paths:  4685 , Tokens:  1797
[167, 352, 1076, 1457, 1505, 1670, 1741, 1762, 1764]
Adj. Rand Index Score: 0.768024.
Adj. Mutual Info Score: 0.823528.
Classes:  [0. 1. 2. 3. 4. 5. 6. 7. 8.] n_clusters:  9.0
Seed:  1234
epoch train_loss valid_loss accuracy time
1 7.214774 6.991594 0.128492 00:00
2 6.655304 6.115685 0.380149 00:00
3 5.582345 4.473593 0.551219 00:00
4 3.837204 2.467131 0.655599 00:00
5 2.332497 1.428497 0.779711 00:00
6 1.438917 0.935282 0.843703 00:00
7 0.958817 0.681576 0.882706 00:00
8 0.707217 0.564512 0.899858 00:00
9 0.586759 0.523660 0.907209 00:00
10 0.531431 0.505467 0.906877 00:00
11 0.508525 0.496894 0.909387 00:00
12 0.498237 0.492717 0.908487 00:00
13 0.497463 0.492242 0.906794 00:00
14 0.494345 0.494157 0.907730 00:00
15 0.491445 0.488916 0.909517 00:00
16 0.488471 0.491487 0.909162 00:00
17 0.487839 0.489087 0.909198 00:00
18 0.489186 0.488720 0.906700 00:00
19 0.487020 0.482693 0.909553 00:00
20 0.484953 0.481565 0.909564 00:00
21 0.484067 0.487397 0.909600 00:00
22 0.481917 0.480557 0.909635 00:00
23 0.481267 0.481233 0.909541 00:00
24 0.483359 0.480320 0.909576 00:00
25 0.480955 0.479775 0.909576 00:00
26 0.479888 0.480651 0.909564 00:00
27 0.480122 0.477693 0.908594 00:00
28 0.478762 0.477567 0.909564 00:00
29 0.479893 0.479124 0.909588 00:00
30 0.478145 0.476815 0.909588 00:00
31 0.478366 0.478124 0.909588 00:00
32 0.477058 0.477589 0.909541 00:00
33 0.476952 0.476330 0.909612 00:00
34 0.475457 0.476516 0.909635 00:00
35 0.474447 0.477025 0.909635 00:00
36 0.475257 0.475721 0.909600 00:00
37 0.477449 0.474758 0.909588 00:00
38 0.477072 0.475013 0.909612 00:00
39 0.475678 0.475180 0.909612 00:00
40 0.475252 0.474115 0.909612 00:00
41 0.473829 0.474066 0.909541 00:00
42 0.474177 0.473926 0.909600 00:00
43 0.474295 0.474014 0.909600 00:00
44 0.474327 0.473713 0.909612 00:00
45 0.475060 0.473690 0.909612 00:00
46 0.473067 0.473501 0.909600 00:00
47 0.472677 0.473487 0.909576 00:00
48 0.471514 0.473457 0.909659 00:00
49 0.471633 0.473451 0.909635 00:00
50 0.471660 0.473440 0.909612 00:00
Paths:  4685 , Tokens:  1797
[167, 352, 1076, 1457, 1505, 1670, 1741, 1762, 1764]
0
Adj. Rand Index Score: 0.768024.
Adj. Mutual Info Score: 0.823528.
Classes:  [0. 1. 2. 3. 4. 5. 6. 7. 8.] n_clusters:  9.0
Seed:  43210
epoch train_loss valid_loss accuracy time
1 7.265277 7.073403 0.115376 00:00
2 6.791418 6.322806 0.367318 00:00
3 5.815885 4.777507 0.525379 00:00
4 4.082221 2.651611 0.628847 00:00
5 2.500208 1.528695 0.766181 00:00
6 1.523875 0.959164 0.840803 00:00
7 0.993554 0.699991 0.877261 00:00
8 0.723751 0.568089 0.899254 00:00
9 0.592937 0.528485 0.903634 00:00
10 0.537050 0.504412 0.904865 00:00
11 0.512134 0.501395 0.909434 00:00
12 0.503093 0.500311 0.907599 00:00
13 0.497467 0.490093 0.907149 00:00
14 0.495976 0.493686 0.905398 00:00
15 0.492793 0.489005 0.906889 00:00
16 0.489818 0.488660 0.909564 00:00
17 0.490158 0.487568 0.909553 00:00
18 0.488628 0.485087 0.909233 00:00
19 0.488076 0.487984 0.906913 00:00
20 0.486229 0.485579 0.909529 00:00
21 0.488159 0.485597 0.907955 00:00
22 0.484318 0.482837 0.909588 00:00
23 0.482363 0.482621 0.909245 00:00
24 0.482733 0.482883 0.909564 00:00
25 0.481931 0.478743 0.909541 00:00
26 0.482197 0.479276 0.908546 00:00
27 0.481692 0.477649 0.909576 00:00
28 0.480168 0.478763 0.909541 00:00
29 0.479258 0.478927 0.909564 00:00
30 0.478619 0.480103 0.909576 00:00
31 0.477905 0.477152 0.909576 00:00
32 0.476977 0.477008 0.909600 00:00
33 0.479849 0.475793 0.909635 00:00
34 0.477437 0.476124 0.909624 00:00
35 0.477674 0.476044 0.909588 00:00
36 0.474486 0.476475 0.909588 00:00
37 0.476539 0.475458 0.909564 00:00
38 0.475261 0.475011 0.909600 00:00
39 0.475009 0.474670 0.909600 00:00
40 0.473672 0.474698 0.909588 00:00
41 0.475699 0.474827 0.909600 00:00
42 0.475191 0.474268 0.909647 00:00
43 0.472797 0.474090 0.909600 00:00
44 0.473153 0.473908 0.909600 00:00
45 0.473436 0.473720 0.909635 00:00
46 0.470849 0.473692 0.909636 00:00
47 0.472753 0.473630 0.909564 00:00
48 0.471956 0.473565 0.909600 00:00
49 0.474392 0.473596 0.909612 00:00
50 0.472514 0.473563 0.909612 00:00
Paths:  4685 , Tokens:  1797
[167, 352, 1076, 1457, 1505, 1670, 1741, 1762, 1764]
Adj. Rand Index Score: 0.768024.
Adj. Mutual Info Score: 0.823528.
Classes:  [0. 1. 2. 3. 4. 5. 6. 7. 8.] n_clusters:  9.0
Seed:  1133557799
epoch train_loss valid_loss accuracy time
1 7.158236 6.923108 0.121970 00:00
2 6.596023 6.048266 0.408535 00:00
3 5.502136 4.355269 0.544425 00:00
4 3.753012 2.406007 0.662725 00:00
5 2.290431 1.420156 0.782907 00:00
6 1.420374 0.919498 0.846543 00:00
7 0.947983 0.674895 0.882505 00:00
8 0.700927 0.560444 0.901882 00:00
9 0.582936 0.520624 0.905114 00:00
10 0.532979 0.507758 0.904001 00:00
11 0.507965 0.492385 0.906866 00:00
12 0.499316 0.499180 0.907481 00:00
13 0.496805 0.493971 0.909541 00:00
14 0.494263 0.494097 0.909008 00:00
15 0.492580 0.487731 0.909186 00:00
16 0.491161 0.487225 0.909553 00:00
17 0.489850 0.485235 0.908452 00:00
18 0.486928 0.485447 0.906108 00:00
19 0.486541 0.483199 0.909588 00:00
20 0.486892 0.481990 0.909588 00:00
21 0.483796 0.484071 0.908369 00:00
22 0.482945 0.486581 0.906416 00:00
23 0.481388 0.481783 0.909576 00:00
24 0.482067 0.481536 0.906984 00:00
25 0.479755 0.478909 0.909612 00:00
26 0.480078 0.478413 0.909576 00:00
27 0.478744 0.479727 0.909576 00:00
28 0.478335 0.479477 0.909553 00:00
29 0.478355 0.477127 0.909588 00:00
30 0.477056 0.477178 0.909541 00:00
31 0.477162 0.477514 0.909576 00:00
32 0.475935 0.475968 0.909588 00:00
33 0.477152 0.477162 0.909588 00:00
34 0.476279 0.477042 0.909576 00:00
35 0.477178 0.476951 0.909624 00:00
36 0.475559 0.475407 0.909553 00:00
37 0.474402 0.474528 0.909612 00:00
38 0.475989 0.474738 0.909576 00:00
39 0.475203 0.474549 0.909600 00:00
40 0.474624 0.474426 0.909624 00:00
41 0.475331 0.474031 0.909564 00:00
42 0.473113 0.473770 0.909612 00:00
43 0.473086 0.473710 0.909600 00:00
44 0.473684 0.473568 0.909624 00:00
45 0.472907 0.473507 0.909600 00:00
46 0.472815 0.473549 0.909565 00:00
47 0.471586 0.473470 0.909612 00:00
48 0.472053 0.473480 0.909612 00:00
49 0.472628 0.473463 0.909600 00:00
50 0.473885 0.473486 0.909600 00:00
Paths:  4685 , Tokens:  1797
[167, 352, 1076, 1457, 1505, 1670, 1741, 1762, 1764]
Adj. Rand Index Score: 0.768024.
Adj. Mutual Info Score: 0.823528.
Classes:  [0. 1. 2. 3. 4. 5. 6. 7. 8.] n_clusters:  9.0
Seed:  22446688
epoch train_loss valid_loss accuracy time
1 7.238947 7.043081 0.121745 00:00
2 6.757493 6.259144 0.373840 00:00
3 5.759178 4.690617 0.540495 00:00
4 4.008903 2.599539 0.642247 00:00
5 2.444153 1.507338 0.769129 00:00
6 1.506878 0.978999 0.838873 00:00
7 0.994948 0.692139 0.880646 00:00
8 0.724774 0.571873 0.899278 00:00
9 0.597714 0.521404 0.904380 00:00
10 0.537247 0.504038 0.905078 00:00
11 0.513395 0.501475 0.907102 00:00
12 0.501942 0.491562 0.909541 00:00
13 0.497007 0.499324 0.908463 00:00
14 0.495277 0.493989 0.904178 00:00
15 0.491108 0.493409 0.909564 00:00
16 0.491773 0.489198 0.908120 00:00
17 0.489184 0.483451 0.908902 00:00
18 0.487346 0.491041 0.908511 00:00
19 0.486141 0.483380 0.909600 00:00
20 0.485188 0.482965 0.908097 00:00
21 0.484326 0.493619 0.903989 00:00
22 0.485536 0.488849 0.909588 00:00
23 0.482960 0.482557 0.909588 00:00
24 0.482638 0.479890 0.909612 00:00
25 0.482563 0.487752 0.909564 00:00
26 0.480852 0.479087 0.909612 00:00
27 0.480019 0.481604 0.909612 00:00
28 0.477972 0.479393 0.909576 00:00
29 0.479797 0.481336 0.909600 00:00
30 0.479706 0.477450 0.909564 00:00
31 0.479115 0.477441 0.909635 00:00
32 0.478287 0.477494 0.909647 00:00
33 0.476412 0.478482 0.909600 00:00
34 0.474765 0.477426 0.909576 00:00
35 0.476755 0.476646 0.909612 00:00
36 0.477862 0.475086 0.909576 00:00
37 0.477082 0.475407 0.909659 00:00
38 0.477076 0.475234 0.909624 00:00
39 0.475062 0.475321 0.909576 00:00
40 0.474797 0.474621 0.909624 00:00
41 0.474567 0.474700 0.909576 00:00
42 0.473946 0.474266 0.909576 00:00
43 0.474285 0.474017 0.909600 00:00
44 0.473647 0.473785 0.909635 00:00
45 0.474183 0.473630 0.909576 00:00
46 0.474191 0.473657 0.909600 00:00
47 0.475801 0.473631 0.909553 00:00
48 0.473713 0.473549 0.909588 00:00
49 0.473379 0.473526 0.909600 00:00
50 0.474855 0.473498 0.909600 00:00
Paths:  4685 , Tokens:  1797
[167, 352, 1076, 1457, 1505, 1670, 1741, 1762, 1764]
Adj. Rand Index Score: 0.768024.
Adj. Mutual Info Score: 0.823528.
Classes:  [0. 1. 2. 3. 4. 5. 6. 7. 8.] n_clusters:  9.0
Seed:  123456789
epoch train_loss valid_loss accuracy time
1 7.193827 6.982358 0.107102 00:00
2 6.662144 6.123871 0.394058 00:00
3 5.586638 4.466891 0.533499 00:00
4 3.848895 2.505474 0.638861 00:00
5 2.374017 1.470520 0.770431 00:00
6 1.471580 0.951046 0.843513 00:00
7 0.972295 0.689432 0.874740 00:00
8 0.721314 0.582920 0.891714 00:00
9 0.595676 0.522418 0.905611 00:00
10 0.541291 0.507318 0.902332 00:00
11 0.511390 0.507058 0.905303 00:00
12 0.505448 0.496534 0.909517 00:00
13 0.496391 0.488733 0.909482 00:00
14 0.493435 0.496408 0.909268 00:00
15 0.491447 0.491266 0.909600 00:00
16 0.491686 0.490357 0.908511 00:00
17 0.490390 0.490522 0.909600 00:00
18 0.490567 0.485864 0.907600 00:00
19 0.488294 0.488276 0.908049 00:00
20 0.483675 0.483098 0.909576 00:00
21 0.483228 0.482630 0.909576 00:00
22 0.482216 0.482832 0.909600 00:00
23 0.482532 0.482412 0.908582 00:00
24 0.482930 0.484539 0.909600 00:00
25 0.481633 0.478996 0.909564 00:00
26 0.480844 0.480753 0.909647 00:00
27 0.481676 0.479475 0.909576 00:00
28 0.480203 0.478419 0.909553 00:00
29 0.478597 0.478354 0.909600 00:00
30 0.477438 0.477923 0.909600 00:00
31 0.477813 0.477213 0.909635 00:00
32 0.476901 0.476789 0.909624 00:00
33 0.475354 0.475937 0.909635 00:00
34 0.475851 0.475580 0.909588 00:00
35 0.476151 0.476213 0.909600 00:00
36 0.476818 0.475552 0.909576 00:00
37 0.476841 0.475861 0.909588 00:00
38 0.476551 0.475532 0.909553 00:00
39 0.476691 0.474675 0.909635 00:00
40 0.475022 0.474213 0.909588 00:00
41 0.474764 0.474242 0.909612 00:00
42 0.474762 0.473871 0.909612 00:00
43 0.471577 0.473844 0.909588 00:00
44 0.472771 0.473752 0.909588 00:00
45 0.474547 0.473595 0.909612 00:00
46 0.472962 0.473533 0.909588 00:00
47 0.473925 0.473503 0.909600 00:00
48 0.473991 0.473528 0.909588 00:00
49 0.472428 0.473534 0.909600 00:00
50 0.472624 0.473466 0.909623 00:00
Paths:  4685 , Tokens:  1797
[167, 352, 1076, 1457, 1505, 1670, 1741, 1762, 1764]
Adj. Rand Index Score: 0.768024.
Adj. Mutual Info Score: 0.823528.
Classes:  [0. 1. 2. 3. 4. 5. 6. 7. 8.] n_clusters:  9.0
Seed:  987654321
epoch train_loss valid_loss accuracy time
1 7.243691 7.036757 0.126148 00:00
2 6.737897 6.245736 0.366915 00:00
3 5.731721 4.698088 0.530564 00:00
4 4.001667 2.616205 0.639737 00:00
5 2.449496 1.509256 0.765566 00:00
6 1.506166 0.965863 0.836813 00:00
7 0.988189 0.698956 0.880350 00:00
8 0.723619 0.569349 0.899882 00:00
9 0.593381 0.523520 0.907162 00:00
10 0.539648 0.509826 0.906629 00:00
11 0.513717 0.499559 0.908902 00:00
12 0.503314 0.494321 0.909469 00:00
13 0.496354 0.493107 0.906960 00:00
14 0.496054 0.498249 0.909576 00:00
15 0.493805 0.495532 0.905990 00:00
16 0.490835 0.492598 0.905931 00:00
17 0.490568 0.490609 0.907008 00:00
18 0.490471 0.486718 0.907114 00:00
19 0.489108 0.486698 0.905421 00:00
20 0.488034 0.484514 0.908819 00:00
21 0.486054 0.486162 0.909541 00:00
22 0.483412 0.481096 0.909541 00:00
23 0.485398 0.483017 0.909588 00:00
24 0.482331 0.483514 0.907055 00:00
25 0.480475 0.482760 0.908854 00:00
26 0.481265 0.484270 0.906593 00:00
27 0.479780 0.478685 0.909564 00:00
28 0.479642 0.479131 0.909564 00:00
29 0.478889 0.480207 0.909635 00:00
30 0.481414 0.478925 0.907528 00:00
31 0.480260 0.478536 0.909600 00:00
32 0.478991 0.477601 0.909576 00:00
33 0.479724 0.477481 0.908866 00:00
34 0.478965 0.477511 0.909588 00:00
35 0.476909 0.475594 0.909576 00:00
36 0.476654 0.476073 0.909612 00:00
37 0.476080 0.475933 0.909588 00:00
38 0.475543 0.476111 0.909588 00:00
39 0.476459 0.475091 0.909576 00:00
40 0.474732 0.474897 0.909600 00:00
41 0.473210 0.474601 0.909600 00:00
42 0.473405 0.474714 0.909588 00:00
43 0.472809 0.474222 0.909612 00:00
44 0.472603 0.474114 0.909576 00:00
45 0.472718 0.473824 0.909600 00:00
46 0.475416 0.473723 0.909600 00:00
47 0.474442 0.473768 0.909576 00:00
48 0.472953 0.473679 0.909635 00:00
49 0.473268 0.473621 0.909647 00:00
50 0.470645 0.473657 0.909647 00:00
Paths:  4685 , Tokens:  1797
[167, 352, 1076, 1457, 1505, 1670, 1741, 1762, 1764]
Adj. Rand Index Score: 0.768024.
Adj. Mutual Info Score: 0.823528.
Classes:  [0. 1. 2. 3. 4. 5. 6. 7. 8.] n_clusters:  9.0
Seed:  86420
epoch train_loss valid_loss accuracy time
1 7.165868 6.920221 0.129250 00:00
2 6.572721 5.998789 0.332493 00:00
3 5.457621 4.314236 0.529036 00:00
4 3.716811 2.372469 0.655114 00:00
5 2.269682 1.407940 0.776077 00:00
6 1.410210 0.918973 0.847514 00:00
7 0.948092 0.677954 0.881664 00:00
8 0.702673 0.565478 0.901432 00:00
9 0.586541 0.520813 0.903172 00:00
10 0.535049 0.511576 0.900201 00:00
11 0.510326 0.500359 0.908925 00:00
12 0.501147 0.495555 0.909541 00:00
13 0.498381 0.493842 0.909186 00:00
14 0.493480 0.497370 0.909588 00:00
15 0.492331 0.491710 0.909564 00:00
16 0.492461 0.491894 0.904072 00:00
17 0.493122 0.488029 0.908026 00:00
18 0.489364 0.483508 0.908582 00:00
19 0.487417 0.487976 0.909612 00:00
20 0.486161 0.487269 0.907280 00:00
21 0.485762 0.482517 0.909612 00:00
22 0.484841 0.486907 0.909588 00:00
23 0.485217 0.483928 0.909044 00:00
24 0.483703 0.482598 0.907031 00:00
25 0.484204 0.481293 0.909624 00:00
26 0.481887 0.480456 0.909564 00:00
27 0.480529 0.479673 0.909624 00:00
28 0.479061 0.479270 0.909588 00:00
29 0.478793 0.479016 0.909517 00:00
30 0.478170 0.478963 0.909588 00:00
31 0.479371 0.478957 0.909588 00:00
32 0.477947 0.478725 0.909612 00:00
33 0.476967 0.477571 0.909612 00:00
34 0.474637 0.476869 0.909588 00:00
35 0.475289 0.476540 0.909564 00:00
36 0.477514 0.475982 0.909612 00:00
37 0.478234 0.475372 0.909612 00:00
38 0.475472 0.475616 0.909600 00:00
39 0.474504 0.475134 0.909635 00:00
40 0.474870 0.474629 0.909588 00:00
41 0.473847 0.474575 0.909600 00:00
42 0.473058 0.474222 0.909600 00:00
43 0.474446 0.474082 0.909624 00:00
44 0.473021 0.473810 0.909624 00:00
45 0.476393 0.473723 0.909624 00:00
46 0.473829 0.473646 0.909612 00:00
47 0.472840 0.473683 0.909624 00:00
48 0.474640 0.473603 0.909635 00:00
49 0.472764 0.473607 0.909588 00:00
50 0.474192 0.473587 0.909612 00:00
Paths:  4685 , Tokens:  1797
[167, 352, 1076, 1457, 1505, 1670, 1741, 1762, 1764]
Adj. Rand Index Score: 0.768024.
Adj. Mutual Info Score: 0.823528.
Classes:  [0. 1. 2. 3. 4. 5. 6. 7. 8.] n_clusters:  9.0


File Name to Train:  data_sets/00_bunch/seeds/seeds.csv
#############################################################
HDBSCAN
#############################################################
Adj. Rand Index Score: 0.413773.
Adj. Mutual Info Score: 0.504953.
Classes:  [-1  0  1  2] n_clusters:  3
QSHIFTPP
#############################################################
Adj. Rand Index Score: 0.752283.
Adj. Mutual Info Score: 0.707126.
Classes:  [0 1 2] n_clusters:  3
HQSHIFT
#############################################################
Adj. Rand Index Score: 0.413773.
Adj. Mutual Info Score: 0.504953.
Classes:  [-1  0  1  2] n_clusters:  3
Seed:  0
epoch train_loss valid_loss accuracy time
1 5.286868 5.311974 0.023438 00:00
2 5.250213 5.294427 0.048633 00:00
3 5.205490 5.261797 0.097266 00:00
4 5.144488 5.202171 0.186133 00:00
5 5.066547 5.097850 0.255664 00:00
6 4.967786 4.917655 0.323047 00:00
7 4.846941 4.620129 0.388086 00:00
8 4.696753 4.150607 0.510742 00:00
9 4.513618 3.492771 0.564844 00:00
10 4.292715 2.738514 0.623242 00:00
11 4.040936 2.098820 0.654297 00:00
12 3.775242 1.641743 0.680469 00:00
13 3.506527 1.336225 0.726953 00:00
14 3.250319 1.137240 0.754297 00:00
15 3.012639 1.003635 0.775195 00:00
16 2.793782 0.901302 0.797070 00:00
17 2.595708 0.839225 0.809375 00:00
18 2.414679 0.784838 0.820117 00:00
19 2.252350 0.764192 0.808789 00:00
20 2.107888 0.738009 0.828125 00:00
21 1.977080 0.732627 0.828711 00:00
22 1.860086 0.725621 0.824023 00:00
23 1.754994 0.718116 0.830469 00:00
24 1.660332 0.711344 0.829688 00:00
25 1.574806 0.706352 0.828906 00:00
26 1.497589 0.705745 0.830078 00:00
27 1.428322 0.703157 0.829102 00:00
28 1.365272 0.700213 0.829102 00:00
29 1.308407 0.697687 0.828906 00:00
30 1.256603 0.694806 0.829883 00:00
31 1.208908 0.694146 0.829297 00:00
32 1.165504 0.698290 0.829687 00:00
33 1.127105 0.696422 0.828320 00:00
34 1.089890 0.695854 0.825195 00:00
35 1.057469 0.695962 0.829102 00:00
36 1.027662 0.694147 0.829687 00:00
37 1.000542 0.692388 0.829297 00:00
38 0.975142 0.690467 0.830273 00:00
39 0.952553 0.690168 0.831445 00:00
40 0.930993 0.688476 0.829883 00:00
41 0.911976 0.688864 0.829102 00:00
42 0.893824 0.689079 0.829688 00:00
43 0.878092 0.688989 0.830078 00:00
44 0.862821 0.687582 0.829492 00:00
45 0.848761 0.688763 0.829297 00:00
46 0.836146 0.688509 0.829687 00:00
47 0.823997 0.687859 0.830469 00:00
48 0.812600 0.688055 0.830859 00:00
49 0.803122 0.687607 0.831250 00:00
50 0.794287 0.687996 0.830469 00:00
/home/can/anaconda3/envs/hqshift/lib/python3.6/site-packages/fastai/datasets.py:153: YAMLLoadWarning: calling yaml.load() without Loader=... is deprecated, as the default Loader is unsafe. Please read https://msg.pyyaml.org/load for full details.
  with open(fpath, 'r') as yaml_file: return yaml.load(yaml_file)
Paths:  474 , Tokens:  210
[49, 69, 93, 122, 183, 192]
0
Adj. Rand Index Score: 0.403137.
Adj. Mutual Info Score: 0.473262.
Classes:  [0. 1. 2.] n_clusters:  3.0
Seed:  23
epoch train_loss valid_loss accuracy time
1 5.362040 5.355941 0.017969 00:00
2 5.337102 5.341494 0.018164 00:00
3 5.301125 5.313323 0.021875 00:00
4 5.252801 5.260508 0.063281 00:00
5 5.188508 5.164976 0.169531 00:00
6 5.110781 4.999864 0.281250 00:00
7 5.013660 4.728194 0.410742 00:00
8 4.891727 4.300388 0.488672 00:00
9 4.738280 3.692496 0.534766 00:00
10 4.546075 2.993240 0.567578 00:00
11 4.313321 2.345271 0.597070 00:00
12 4.052401 1.900291 0.626367 00:00
13 3.782417 1.568115 0.677539 00:00
14 3.514635 1.317998 0.711523 00:00
15 3.260965 1.147087 0.743945 00:00
16 3.024332 0.997360 0.770117 00:00
17 2.807970 0.891327 0.788867 00:00
18 2.610609 0.820602 0.814258 00:00
19 2.431222 0.780308 0.822070 00:00
20 2.271942 0.775019 0.818164 00:00
21 2.126440 0.747827 0.820703 00:00
22 1.996768 0.740493 0.812109 00:00
23 1.880530 0.724981 0.829688 00:00
24 1.775500 0.725853 0.823438 00:00
25 1.680761 0.714503 0.829102 00:00
26 1.595943 0.717820 0.824219 00:00
27 1.517812 0.710794 0.828711 00:00
28 1.447312 0.706404 0.829492 00:00
29 1.383532 0.701807 0.830859 00:00
30 1.324932 0.705167 0.830469 00:00
31 1.271837 0.700173 0.828906 00:00
32 1.224000 0.697861 0.829492 00:00
33 1.179601 0.695018 0.830664 00:00
34 1.138950 0.694716 0.829688 00:00
35 1.101927 0.692798 0.831445 00:00
36 1.067662 0.691137 0.829883 00:00
37 1.036721 0.692137 0.830273 00:00
38 1.008749 0.690808 0.829492 00:00
39 0.982643 0.689343 0.830273 00:00
40 0.958829 0.691090 0.830273 00:00
41 0.936553 0.689414 0.830273 00:00
42 0.916641 0.688294 0.829492 00:00
43 0.898479 0.688981 0.829492 00:00
44 0.881660 0.689489 0.830469 00:00
45 0.866103 0.689593 0.829688 00:00
46 0.852086 0.688925 0.829492 00:00
47 0.839105 0.688756 0.829492 00:00
48 0.826271 0.687900 0.830664 00:00
49 0.815372 0.688304 0.830469 00:00
50 0.804807 0.687329 0.830078 00:00
Paths:  474 , Tokens:  210
[49, 69, 93, 122, 183, 192]
0
Adj. Rand Index Score: 0.403137.
Adj. Mutual Info Score: 0.473262.
Classes:  [0. 1. 2.] n_clusters:  3.0
Seed:  42
epoch train_loss valid_loss accuracy time
1 5.363315 5.339903 0.002539 00:00
2 5.337633 5.325966 0.003320 00:00
3 5.303735 5.300277 0.008008 00:00
4 5.258486 5.252803 0.058594 00:00
5 5.200274 5.168670 0.176953 00:00
6 5.128747 5.027593 0.232422 00:00
7 5.038327 4.800417 0.355078 00:00
8 4.924458 4.446014 0.411719 00:00
9 4.779312 3.918836 0.474023 00:00
10 4.596964 3.206694 0.549023 00:00
11 4.370150 2.461984 0.600391 00:00
12 4.111202 1.899392 0.640625 00:00
13 3.839419 1.528348 0.700000 00:00
14 3.570930 1.269776 0.739648 00:00
15 3.314037 1.085884 0.765039 00:00
16 3.073373 0.965271 0.783203 00:00
17 2.852495 0.881212 0.803320 00:00
18 2.651106 0.827014 0.812891 00:00
19 2.468402 0.775089 0.822461 00:00
20 2.304673 0.754952 0.825391 00:00
21 2.155844 0.738223 0.827930 00:00
22 2.022914 0.731568 0.827734 00:00
23 1.903251 0.721757 0.827344 00:00
24 1.796245 0.714243 0.830469 00:00
25 1.700152 0.712697 0.828516 00:00
26 1.613064 0.706485 0.829687 00:00
27 1.533670 0.700547 0.826758 00:00
28 1.462123 0.701916 0.830664 00:00
29 1.396573 0.697537 0.828906 00:00
30 1.337570 0.698121 0.829492 00:00
31 1.283258 0.695470 0.830664 00:00
32 1.233964 0.696157 0.829687 00:00
33 1.188573 0.695102 0.827734 00:00
34 1.147152 0.692032 0.830078 00:00
35 1.109321 0.692833 0.830274 00:00
36 1.074815 0.691245 0.830078 00:00
37 1.043194 0.690444 0.830664 00:00
38 1.014057 0.690498 0.830274 00:00
39 0.987654 0.690018 0.831055 00:00
40 0.963435 0.691070 0.829492 00:00
41 0.941620 0.689340 0.829297 00:00
42 0.921129 0.689264 0.829883 00:00
43 0.902168 0.688518 0.829687 00:00
44 0.885132 0.688177 0.830078 00:00
45 0.869137 0.688134 0.830469 00:00
46 0.854462 0.688108 0.829883 00:00
47 0.840988 0.688506 0.830078 00:00
48 0.828298 0.687414 0.830469 00:00
49 0.816715 0.687530 0.830664 00:00
50 0.806067 0.688392 0.829492 00:00
Paths:  474 , Tokens:  210
[49, 69, 93, 122, 183, 192]
0
Adj. Rand Index Score: 0.403137.
Adj. Mutual Info Score: 0.473262.
Classes:  [0. 1. 2.] n_clusters:  3.0
Seed:  1234
epoch train_loss valid_loss accuracy time
1 5.360583 5.339934 0.000977 00:00
2 5.330885 5.322944 0.002148 00:00
3 5.291595 5.290174 0.030273 00:00
4 5.235236 5.231449 0.058203 00:00
5 5.161954 5.133492 0.199023 00:00
6 5.070352 4.973432 0.342969 00:00
7 4.960203 4.713956 0.431641 00:00
8 4.822572 4.296537 0.492969 00:00
9 4.653031 3.689778 0.573242 00:00
10 4.444983 2.947831 0.611523 00:00
11 4.201418 2.257740 0.637695 00:00
12 3.933572 1.764959 0.661133 00:00
13 3.663574 1.466589 0.689258 00:00
14 3.405512 1.272615 0.729688 00:00
15 3.163580 1.102663 0.747852 00:00
16 2.939159 0.979763 0.781836 00:00
17 2.731814 0.915451 0.784570 00:00
18 2.542495 0.838420 0.800000 00:00
19 2.371861 0.811591 0.816016 00:00
20 2.217978 0.766999 0.826172 00:00
21 2.078703 0.765001 0.815625 00:00
22 1.953796 0.729569 0.827734 00:00
23 1.842127 0.726283 0.821875 00:00
24 1.739573 0.711712 0.830664 00:00
25 1.647262 0.709510 0.829297 00:00
26 1.564008 0.706928 0.831836 00:00
27 1.488897 0.709796 0.829687 00:00
28 1.421902 0.703189 0.830273 00:00
29 1.360253 0.702853 0.830469 00:00
30 1.304384 0.698855 0.828516 00:00
31 1.253579 0.699284 0.830078 00:00
32 1.206853 0.698937 0.829492 00:00
33 1.163904 0.698271 0.830273 00:00
34 1.125033 0.695158 0.830859 00:00
35 1.089696 0.696452 0.829688 00:00
36 1.056884 0.694858 0.829883 00:00
37 1.026893 0.693264 0.829297 00:00
38 0.999946 0.692565 0.829688 00:00
39 0.973866 0.692132 0.828906 00:00
40 0.950946 0.690242 0.830859 00:00
41 0.929400 0.690247 0.830469 00:00
42 0.909856 0.689346 0.830273 00:00
43 0.892060 0.689736 0.829492 00:00
44 0.875426 0.689944 0.829883 00:00
45 0.860927 0.688841 0.828906 00:00
46 0.846746 0.688938 0.829492 00:00
47 0.834220 0.688983 0.828906 00:00
48 0.822524 0.689148 0.828906 00:00
49 0.812031 0.688785 0.829102 00:00
50 0.802275 0.689328 0.829883 00:00
Paths:  474 , Tokens:  210
[49, 69, 93, 122, 183, 192]
0
Adj. Rand Index Score: 0.403137.
Adj. Mutual Info Score: 0.473262.
Classes:  [0. 1. 2.] n_clusters:  3.0
Seed:  43210
epoch train_loss valid_loss accuracy time
1 5.403941 5.347871 0.003320 00:00
2 5.378778 5.333846 0.006055 00:00
3 5.343394 5.307419 0.014844 00:00
4 5.295918 5.260590 0.111328 00:00
5 5.235258 5.178112 0.179102 00:00
6 5.159544 5.039396 0.241602 00:00
7 5.066237 4.809397 0.342188 00:00
8 4.949224 4.430661 0.452930 00:00
9 4.802004 3.868149 0.528320 00:00
10 4.615366 3.131550 0.598633 00:00
11 4.388020 2.404979 0.621484 00:00
12 4.127118 1.895180 0.649805 00:00
13 3.854105 1.570137 0.679883 00:00
14 3.583565 1.320367 0.725195 00:00
15 3.325577 1.153053 0.729883 00:00
16 3.085616 1.017287 0.767383 00:00
17 2.864123 0.921526 0.797656 00:00
18 2.663930 0.876225 0.797266 00:00
19 2.482517 0.814312 0.805859 00:00
20 2.318162 0.787636 0.818359 00:00
21 2.169858 0.755756 0.821289 00:00
22 2.037002 0.754840 0.825391 00:00
23 1.917169 0.728547 0.829297 00:00
24 1.810569 0.718751 0.827539 00:00
25 1.712431 0.712950 0.828711 00:00
26 1.623648 0.707994 0.829883 00:00
27 1.543457 0.706042 0.829492 00:00
28 1.471211 0.701216 0.830469 00:00
29 1.405039 0.701672 0.829102 00:00
30 1.345936 0.700136 0.829492 00:00
31 1.291155 0.700267 0.829688 00:00
32 1.241089 0.695997 0.829102 00:00
33 1.195853 0.694288 0.830273 00:00
34 1.153128 0.695048 0.828906 00:00
35 1.114974 0.692655 0.829492 00:00
36 1.080334 0.693584 0.829687 00:00
37 1.048855 0.691977 0.831250 00:00
38 1.019418 0.691125 0.830664 00:00
39 0.992529 0.691195 0.830859 00:00
40 0.968026 0.690824 0.829492 00:00
41 0.945965 0.690885 0.830078 00:00
42 0.925019 0.689207 0.830078 00:00
43 0.906437 0.690311 0.828711 00:00
44 0.888516 0.689050 0.829883 00:00
45 0.872950 0.688744 0.830859 00:00
46 0.857661 0.688223 0.830469 00:00
47 0.843969 0.688832 0.830078 00:00
48 0.831302 0.689144 0.829687 00:00
49 0.819841 0.688164 0.829883 00:00
50 0.809521 0.687599 0.830469 00:00
Paths:  474 , Tokens:  210
[49, 69, 93, 122, 183, 192]
0
Adj. Rand Index Score: 0.403137.
Adj. Mutual Info Score: 0.473262.
Classes:  [0. 1. 2.] n_clusters:  3.0
Seed:  1133557799
epoch train_loss valid_loss accuracy time
1 5.365668 5.331084 0.009180 00:00
2 5.340348 5.317561 0.046484 00:00
3 5.306249 5.291880 0.082031 00:00
4 5.259461 5.245383 0.136914 00:00
5 5.196848 5.162170 0.213477 00:00
6 5.114454 5.017547 0.293945 00:00
7 5.013210 4.776783 0.347461 00:00
8 4.888155 4.399590 0.386523 00:00
9 4.731781 3.819297 0.465234 00:00
10 4.538999 3.103232 0.509570 00:00
11 4.310895 2.425593 0.593750 00:00
12 4.050942 1.886504 0.647070 00:00
13 3.780839 1.519994 0.697070 00:00
14 3.514572 1.264139 0.732031 00:00
15 3.259416 1.091474 0.762109 00:00
16 3.023930 0.969932 0.784570 00:00
17 2.807616 0.889857 0.795508 00:00
18 2.612500 0.839281 0.795508 00:00
19 2.433778 0.781845 0.820898 00:00
20 2.272805 0.773382 0.823242 00:00
21 2.127784 0.740472 0.815039 00:00
22 1.998613 0.727559 0.828320 00:00
23 1.881709 0.722701 0.828711 00:00
24 1.776437 0.718765 0.828906 00:00
25 1.681547 0.712888 0.829492 00:00
26 1.594938 0.710307 0.829297 00:00
27 1.517329 0.704999 0.829102 00:00
28 1.447304 0.703568 0.828711 00:00
29 1.383166 0.702020 0.828906 00:00
30 1.325160 0.701303 0.823633 00:00
31 1.272173 0.698047 0.828711 00:00
32 1.223696 0.697405 0.829492 00:00
33 1.179137 0.695103 0.829688 00:00
34 1.138323 0.697161 0.829492 00:00
35 1.101286 0.694273 0.827930 00:00
36 1.068279 0.694621 0.829297 00:00
37 1.037236 0.692784 0.830469 00:00
38 1.009717 0.691223 0.829297 00:00
39 0.983269 0.691011 0.829297 00:00
40 0.960620 0.690857 0.829297 00:00
41 0.938687 0.691033 0.830078 00:00
42 0.918546 0.690428 0.829297 00:00
43 0.899595 0.689768 0.829102 00:00
44 0.882471 0.689845 0.829492 00:00
45 0.866615 0.689251 0.828906 00:00
46 0.851530 0.689187 0.830664 00:00
47 0.838332 0.688867 0.829688 00:00
48 0.825267 0.688783 0.830664 00:00
49 0.814548 0.688674 0.829102 00:00
50 0.803670 0.688441 0.830078 00:00
Paths:  474 , Tokens:  210
[49, 69, 93, 122, 183, 192]
0
Adj. Rand Index Score: 0.403137.
Adj. Mutual Info Score: 0.473262.
Classes:  [0. 1. 2.] n_clusters:  3.0
Seed:  22446688
epoch train_loss valid_loss accuracy time
1 5.441226 5.360204 0.005273 00:00
2 5.405838 5.344580 0.006055 00:00
3 5.362249 5.313320 0.009180 00:00
4 5.301203 5.253747 0.045313 00:00
5 5.226854 5.152664 0.126758 00:00
6 5.135906 4.990287 0.211328 00:00
7 5.023493 4.728133 0.373438 00:00
8 4.885191 4.299979 0.457617 00:00
9 4.708629 3.662229 0.516406 00:00
10 4.494712 2.872623 0.598633 00:00
11 4.244754 2.215041 0.641016 00:00
12 3.972751 1.734400 0.677734 00:00
13 3.699681 1.432288 0.701172 00:00
14 3.436446 1.212319 0.744531 00:00
15 3.185848 1.052064 0.767383 00:00
16 2.956450 0.937611 0.787305 00:00
17 2.744856 0.876109 0.802344 00:00
18 2.553393 0.813987 0.801172 00:00
19 2.380120 0.784980 0.823633 00:00
20 2.224722 0.754113 0.818164 00:00
21 2.083665 0.732631 0.827930 00:00
22 1.957701 0.719833 0.828125 00:00
23 1.844541 0.714806 0.829688 00:00
24 1.742213 0.713624 0.829297 00:00
25 1.651020 0.709259 0.818945 00:00
26 1.568126 0.707530 0.829492 00:00
27 1.492600 0.705656 0.828711 00:00
28 1.424289 0.702649 0.829102 00:00
29 1.362254 0.700709 0.829297 00:00
30 1.305769 0.698122 0.830273 00:00
31 1.253935 0.698801 0.829102 00:00
32 1.207279 0.698146 0.829297 00:00
33 1.164430 0.693108 0.830078 00:00
34 1.125676 0.694397 0.829492 00:00
35 1.090790 0.695613 0.828906 00:00
36 1.057644 0.693047 0.829297 00:00
37 1.028132 0.692982 0.830664 00:00
38 1.000291 0.692366 0.829492 00:00
39 0.975141 0.691881 0.829687 00:00
40 0.952435 0.691338 0.829883 00:00
41 0.932107 0.690238 0.829492 00:00
42 0.912812 0.690793 0.829687 00:00
43 0.894672 0.689931 0.829492 00:00
44 0.877837 0.690086 0.828320 00:00
45 0.863422 0.689874 0.829297 00:00
46 0.849760 0.688762 0.828906 00:00
47 0.836286 0.689570 0.829297 00:00
48 0.824444 0.690024 0.829102 00:00
49 0.813577 0.690119 0.828906 00:00
50 0.803005 0.689136 0.828516 00:00
Paths:  474 , Tokens:  210
[49, 69, 93, 122, 183, 192]
0
Adj. Rand Index Score: 0.403137.
Adj. Mutual Info Score: 0.473262.
Classes:  [0. 1. 2.] n_clusters:  3.0
Seed:  123456789
epoch train_loss valid_loss accuracy time
1 5.416296 5.326296 0.012891 00:00
2 5.382298 5.311406 0.014258 00:00
3 5.335956 5.281637 0.051563 00:00
4 5.271511 5.225453 0.115234 00:00
5 5.192565 5.126373 0.224414 00:00
6 5.095237 4.954106 0.274805 00:00
7 4.980387 4.675584 0.327539 00:00
8 4.844080 4.250974 0.402734 00:00
9 4.676033 3.656956 0.477539 00:00
10 4.472655 2.943804 0.550391 00:00
11 4.236574 2.291404 0.600195 00:00
12 3.976966 1.823546 0.641797 00:00
13 3.709702 1.508870 0.682227 00:00
14 3.448513 1.264695 0.726953 00:00
15 3.202489 1.081352 0.761914 00:00
16 2.971808 0.957598 0.786523 00:00
17 2.759957 0.884779 0.802734 00:00
18 2.568247 0.832163 0.798633 00:00
19 2.393802 0.783710 0.819141 00:00
20 2.237143 0.761100 0.824219 00:00
21 2.095860 0.747342 0.808008 00:00
22 1.969106 0.728888 0.826563 00:00
23 1.855531 0.721163 0.828516 00:00
24 1.751959 0.720848 0.815234 00:00
25 1.657961 0.714446 0.828711 00:00
26 1.574122 0.709394 0.828320 00:00
27 1.498112 0.708745 0.829883 00:00
28 1.429204 0.704829 0.828906 00:00
29 1.366304 0.704065 0.829297 00:00
30 1.309739 0.701667 0.828125 00:00
31 1.258214 0.702036 0.828711 00:00
32 1.211292 0.699824 0.829883 00:00
33 1.168059 0.699838 0.829297 00:00
34 1.128668 0.698947 0.829687 00:00
35 1.092727 0.697827 0.829297 00:00
36 1.060246 0.695364 0.829688 00:00
37 1.030234 0.696152 0.829492 00:00
38 1.003003 0.694364 0.829492 00:00
39 0.977798 0.693920 0.829687 00:00
40 0.954657 0.692629 0.830273 00:00
41 0.933317 0.692392 0.830078 00:00
42 0.913273 0.692883 0.831055 00:00
43 0.895107 0.692011 0.829883 00:00
44 0.878877 0.692891 0.828516 00:00
45 0.863623 0.691781 0.829883 00:00
46 0.849281 0.692010 0.829102 00:00
47 0.836306 0.692107 0.829492 00:00
48 0.824154 0.691976 0.828906 00:00
49 0.813400 0.692439 0.829102 00:00
50 0.803464 0.692237 0.828125 00:00
Paths:  474 , Tokens:  210
[49, 69, 93, 122, 183, 192]
0
Adj. Rand Index Score: 0.403137.
Adj. Mutual Info Score: 0.473262.
Classes:  [0. 1. 2.] n_clusters:  3.0
Seed:  987654321
epoch train_loss valid_loss accuracy time
1 5.426015 5.362936 0.003320 00:00
2 5.400373 5.350706 0.024414 00:00
3 5.366880 5.324727 0.034961 00:00
4 5.319046 5.275183 0.060938 00:00
5 5.256995 5.185686 0.113672 00:00
6 5.179133 5.033902 0.227539 00:00
7 5.083931 4.793162 0.337500 00:00
8 4.966156 4.427274 0.442187 00:00
9 4.817691 3.873680 0.512305 00:00
10 4.627573 3.151974 0.582813 00:00
11 4.397860 2.444832 0.604102 00:00
12 4.132672 1.880567 0.629883 00:00
13 3.857416 1.538617 0.677734 00:00
14 3.586134 1.299754 0.724023 00:00
15 3.328479 1.111829 0.761133 00:00
16 3.088239 0.980855 0.783789 00:00
17 2.866111 0.887885 0.800195 00:00
18 2.662750 0.847547 0.813086 00:00
19 2.479776 0.793298 0.822461 00:00
20 2.314445 0.756062 0.826172 00:00
21 2.165126 0.752000 0.828516 00:00
22 2.032464 0.732467 0.818945 00:00
23 1.912521 0.726006 0.829102 00:00
24 1.804305 0.717765 0.828906 00:00
25 1.706875 0.707807 0.829688 00:00
26 1.618619 0.711433 0.830859 00:00
27 1.540354 0.710051 0.828516 00:00
28 1.468145 0.702537 0.830664 00:00
29 1.402957 0.701645 0.828906 00:00
30 1.342520 0.702802 0.830469 00:00
31 1.288029 0.699983 0.828906 00:00
32 1.238224 0.697453 0.830469 00:00
33 1.192025 0.695717 0.828711 00:00
34 1.150905 0.694930 0.828711 00:00
35 1.111843 0.695267 0.830273 00:00
36 1.076876 0.692932 0.830469 00:00
37 1.045596 0.692977 0.827930 00:00
38 1.016590 0.692284 0.829492 00:00
39 0.990128 0.690923 0.830078 00:00
40 0.965581 0.691650 0.830469 00:00
41 0.943066 0.690684 0.830078 00:00
42 0.923258 0.691212 0.829102 00:00
43 0.904686 0.690002 0.829492 00:00
44 0.886782 0.690057 0.828906 00:00
45 0.870754 0.688621 0.829492 00:00
46 0.856111 0.688932 0.829688 00:00
47 0.842217 0.689372 0.830469 00:00
48 0.829881 0.688909 0.830078 00:00
49 0.819214 0.689368 0.830859 00:00
50 0.808700 0.689581 0.830469 00:00
Paths:  474 , Tokens:  210
[49, 69, 93, 122, 183, 192]
Adj. Rand Index Score: 0.403137.
Adj. Mutual Info Score: 0.473262.
Classes:  [0. 1. 2.] n_clusters:  3.0
Seed:  86420
epoch train_loss valid_loss accuracy time
1 5.367269 5.319778 0.009570 00:00
2 5.337466 5.306462 0.027344 00:00
3 5.300839 5.279361 0.039844 00:00
4 5.252572 5.229908 0.061523 00:00
5 5.190369 5.144810 0.134180 00:00
6 5.114606 5.003541 0.287891 00:00
7 5.018514 4.766831 0.422461 00:00
8 4.897010 4.386443 0.489258 00:00
9 4.741733 3.815784 0.550195 00:00
10 4.544442 3.059375 0.590820 00:00
11 4.306994 2.337376 0.634375 00:00
12 4.038640 1.819747 0.666797 00:00
13 3.762982 1.477777 0.703906 00:00
14 3.492625 1.256381 0.737305 00:00
15 3.237948 1.079919 0.768359 00:00
16 3.003993 0.979269 0.778320 00:00
17 2.788758 0.881393 0.803906 00:00
18 2.593487 0.828739 0.797852 00:00
19 2.418019 0.772036 0.823633 00:00
20 2.257862 0.752017 0.815625 00:00
21 2.115612 0.747274 0.821680 00:00
22 1.987264 0.730523 0.829492 00:00
23 1.871745 0.720253 0.828516 00:00
24 1.767492 0.716288 0.829883 00:00
25 1.673183 0.708675 0.829687 00:00
26 1.586771 0.706825 0.829297 00:00
27 1.509312 0.703524 0.828711 00:00
28 1.438722 0.701848 0.830078 00:00
29 1.375632 0.697826 0.829492 00:00
30 1.318375 0.695348 0.829492 00:00
31 1.265612 0.696183 0.830273 00:00
32 1.218103 0.695618 0.830273 00:00
33 1.174246 0.692027 0.829297 00:00
34 1.134149 0.694777 0.829102 00:00
35 1.097459 0.692303 0.829883 00:00
36 1.064309 0.690151 0.831250 00:00
37 1.033359 0.690535 0.830078 00:00
38 1.005856 0.691171 0.829883 00:00
39 0.980026 0.689470 0.829688 00:00
40 0.956219 0.691054 0.831250 00:00
41 0.934772 0.689609 0.830469 00:00
42 0.914801 0.689795 0.829492 00:00
43 0.896468 0.689283 0.829297 00:00
44 0.878961 0.688875 0.828906 00:00
45 0.863868 0.688404 0.829687 00:00
46 0.849902 0.688451 0.828906 00:00
47 0.837169 0.687207 0.829688 00:00
48 0.824529 0.687741 0.830469 00:00
49 0.813520 0.688252 0.829883 00:00
50 0.803252 0.688156 0.830274 00:00
Paths:  474 , Tokens:  210
[49, 69, 93, 122, 183, 192]
0
Adj. Rand Index Score: 0.403137.
Adj. Mutual Info Score: 0.473262.
Classes:  [0. 1. 2.] n_clusters:  3.0


File Name to Train:  data_sets/01_bunch/phoneme/phoneme.csv
#############################################################
HDBSCAN
#############################################################
Adj. Rand Index Score: 0.760092.
Adj. Mutual Info Score: 0.779888.
Classes:  [0 1 2 3] n_clusters:  4
QSHIFTPP
#############################################################
Adj. Rand Index Score: 0.489517.
Adj. Mutual Info Score: 0.584644.
Classes:  [ 0  1  2  3  4  5  6  7  8  9 10] n_clusters:  11
HQSHIFT
#############################################################
Adj. Rand Index Score: 0.760092.
Adj. Mutual Info Score: 0.779888.
Classes:  [0 1 2 3] n_clusters:  4
Seed:  0
epoch train_loss valid_loss accuracy time
1 6.316370 5.808015 0.424561 00:04
2 3.060650 2.653844 0.628309 00:04
3 1.569017 1.378768 0.787614 00:04
4 0.879988 0.798094 0.870250 00:04
5 0.626892 0.601034 0.883298 00:04
6 0.549547 0.527949 0.908386 00:04
7 0.524002 0.531690 0.910930 00:04
8 0.517110 0.518561 0.910351 00:04
9 0.515187 0.521524 0.910724 00:04
10 0.511016 0.519984 0.905259 00:04
11 0.512947 0.509495 0.908355 00:04
12 0.506702 0.508393 0.909645 00:04
13 0.508885 0.504852 0.911018 00:04
14 0.502568 0.502486 0.910706 00:04
15 0.508006 0.505523 0.909136 00:04
16 0.507032 0.500764 0.910400 00:04
17 0.502466 0.499461 0.911488 00:04
18 0.502905 0.498913 0.911298 00:04
19 0.498783 0.498541 0.911420 00:04
20 0.502103 0.497018 0.911420 00:04
21 0.499833 0.497825 0.911435 00:04
22 0.503335 0.497198 0.911451 00:04
23 0.497524 0.496449 0.911475 00:04
24 0.502424 0.496368 0.911482 00:04
25 0.500189 0.496824 0.911420 00:04
26 0.498448 0.496626 0.911503 00:04
27 0.498796 0.496462 0.911503 00:04
28 0.498102 0.495644 0.911516 00:04
29 0.498780 0.495936 0.911506 00:04
30 0.495902 0.495727 0.911509 00:04
31 0.497488 0.495989 0.911548 00:04
32 0.497862 0.495419 0.911537 00:04
33 0.497809 0.495314 0.911532 00:04
34 0.495247 0.495368 0.911521 00:04
35 0.493432 0.495353 0.911532 00:04
36 0.494336 0.495117 0.911519 00:04
37 0.494129 0.495244 0.911524 00:04
38 0.500305 0.495016 0.911521 00:04
39 0.497980 0.494890 0.911521 00:04
40 0.495104 0.494889 0.911516 00:04
41 0.498446 0.494922 0.911519 00:04
42 0.488165 0.494842 0.911521 00:04
43 0.494024 0.494668 0.911524 00:04
44 0.493174 0.494684 0.911519 00:04
45 0.491586 0.494710 0.911532 00:04
46 0.494835 0.494646 0.911532 00:04
47 0.493789 0.494604 0.911529 00:04
48 0.494644 0.494585 0.911516 00:04
49 0.493714 0.494571 0.911527 00:04
50 0.492076 0.494580 0.911527 00:04
/home/can/anaconda3/envs/hqshift/lib/python3.6/site-packages/fastai/datasets.py:153: YAMLLoadWarning: calling yaml.load() without Loader=... is deprecated, as the default Loader is unsafe. Please read https://msg.pyyaml.org/load for full details.
  with open(fpath, 'r') as yaml_file: return yaml.load(yaml_file)
Paths:  19283 , Tokens:  4508
[108, 385, 419, 1116, 2174, 2396, 2751, 2869, 3200, 3409, 3724, 3890, 4023, 4174, 4374, 4411, 4425]
Adj. Rand Index Score: 0.758399.
Adj. Mutual Info Score: 0.777229.
Classes:  [0. 1. 2. 3.] n_clusters:  4.0
Seed:  23
epoch train_loss valid_loss accuracy time
1 6.574629 6.074909 0.429506 00:04
2 3.216560 2.813805 0.621636 00:04
3 1.613814 1.430764 0.781130 00:04
4 0.901068 0.808517 0.863429 00:04
5 0.629393 0.595092 0.898019 00:04
6 0.545325 0.531476 0.908884 00:04
7 0.522726 0.518444 0.909894 00:04
8 0.516012 0.517517 0.904140 00:04
9 0.515197 0.512823 0.911402 00:04
10 0.508616 0.508320 0.910790 00:04
11 0.511817 0.506427 0.910805 00:04
12 0.506736 0.504520 0.911046 00:04
13 0.502374 0.502506 0.911101 00:04
14 0.504862 0.502608 0.911478 00:04
15 0.503657 0.502396 0.911472 00:04
16 0.503264 0.500326 0.911477 00:04
17 0.499323 0.499110 0.911501 00:04
18 0.504980 0.500454 0.911454 00:04
19 0.505993 0.498994 0.911457 00:04
20 0.502101 0.498015 0.911464 00:04
21 0.497188 0.497319 0.911509 00:04
22 0.496976 0.497415 0.911459 00:04
23 0.503793 0.497564 0.910867 00:04
24 0.494184 0.496543 0.911506 00:04
25 0.502126 0.495855 0.911521 00:04
26 0.497916 0.496307 0.911532 00:04
27 0.500842 0.496016 0.911532 00:04
28 0.498431 0.496234 0.911535 00:04
29 0.495241 0.495859 0.911537 00:04
30 0.498878 0.496075 0.911519 00:04
31 0.492436 0.495340 0.911524 00:04
32 0.497832 0.495400 0.911534 00:04
33 0.498935 0.495298 0.911519 00:04
34 0.495751 0.495553 0.911524 00:04
35 0.498347 0.495156 0.911521 00:04
36 0.499127 0.495370 0.911530 00:04
37 0.499041 0.495368 0.911524 00:04
38 0.495347 0.495472 0.911524 00:04
39 0.494864 0.494883 0.911519 00:04
40 0.491581 0.494834 0.911519 00:04
41 0.494969 0.494946 0.911527 00:04
42 0.501502 0.495000 0.911535 00:04
43 0.495103 0.494698 0.911521 00:04
44 0.495817 0.494894 0.911521 00:04
45 0.494023 0.494670 0.911521 00:04
46 0.491549 0.494622 0.911526 00:04
47 0.498099 0.494614 0.911516 00:04
48 0.493214 0.494612 0.911521 00:04
49 0.496769 0.494597 0.911535 00:04
50 0.495355 0.494619 0.911516 00:04
Paths:  19283 , Tokens:  4508
[108, 385, 419, 1116, 2174, 2396, 2751, 2869, 3200, 3409, 3724, 3890, 4023, 4174, 4374, 4411, 4425]
Adj. Rand Index Score: 0.758399.
Adj. Mutual Info Score: 0.777229.
Classes:  [0. 1. 2. 3.] n_clusters:  4.0
Seed:  42
epoch train_loss valid_loss accuracy time
1 6.407751 5.903373 0.426181 00:04
2 3.089772 2.699888 0.632766 00:04
3 1.571529 1.408835 0.784367 00:04
4 0.876988 0.800000 0.870069 00:04
5 0.633197 0.591254 0.897023 00:04
6 0.541815 0.537480 0.909251 00:04
7 0.531185 0.524764 0.911083 00:04
8 0.515980 0.522101 0.902769 00:04
9 0.518969 0.513482 0.911384 00:04
10 0.508358 0.509831 0.911026 00:04
11 0.509080 0.508642 0.911337 00:04
12 0.508187 0.508477 0.911397 00:04
13 0.512016 0.505714 0.911267 00:04
14 0.504984 0.504177 0.910694 00:04
15 0.503389 0.499772 0.911425 00:04
16 0.501744 0.498861 0.911436 00:04
17 0.504885 0.500235 0.911487 00:04
18 0.504032 0.498135 0.911472 00:04
19 0.502422 0.497629 0.911500 00:04
20 0.502267 0.498413 0.910462 00:04
21 0.499621 0.498334 0.910740 00:04
22 0.498938 0.497503 0.911500 00:04
23 0.497962 0.497379 0.911405 00:04
24 0.497507 0.496922 0.911482 00:04
25 0.502372 0.497171 0.911506 00:04
26 0.499433 0.496359 0.911524 00:04
27 0.501449 0.495919 0.911532 00:04
28 0.497380 0.495587 0.911532 00:04
29 0.503203 0.495575 0.911540 00:04
30 0.494966 0.495803 0.911524 00:04
31 0.498759 0.495334 0.911532 00:04
32 0.500002 0.495564 0.911513 00:04
33 0.498622 0.496321 0.911537 00:04
34 0.498460 0.495512 0.911553 00:04
35 0.494736 0.495297 0.911532 00:04
36 0.497079 0.494899 0.911540 00:04
37 0.497569 0.495220 0.911545 00:04
38 0.495902 0.495036 0.911521 00:04
39 0.495797 0.494849 0.911524 00:04
40 0.493933 0.494966 0.911519 00:04
41 0.492406 0.494990 0.911521 00:04
42 0.495971 0.494784 0.911532 00:04
43 0.491768 0.494750 0.911519 00:04
44 0.490600 0.494740 0.911521 00:04
45 0.492189 0.494635 0.911535 00:04
46 0.493950 0.494582 0.911532 00:04
47 0.490264 0.494583 0.911529 00:04
48 0.495239 0.494606 0.911524 00:04
49 0.493998 0.494566 0.911529 00:04
50 0.496160 0.494583 0.911527 00:04
Paths:  19283 , Tokens:  4508
[108, 385, 419, 1116, 2174, 2396, 2751, 2869, 3200, 3409, 3724, 3890, 4023, 4174, 4374, 4411, 4425]
Adj. Rand Index Score: 0.758399.
Adj. Mutual Info Score: 0.777229.
Classes:  [0. 1. 2. 3.] n_clusters:  4.0
Seed:  1234
epoch train_loss valid_loss accuracy time
1 6.567103 6.077250 0.409840 00:04
2 3.233395 2.826070 0.620577 00:04
3 1.633889 1.435499 0.779454 00:04
4 0.900862 0.805823 0.869539 00:04
5 0.635880 0.599523 0.901186 00:04
6 0.553268 0.535422 0.909263 00:04
7 0.519857 0.518767 0.910618 00:04
8 0.517749 0.512996 0.910083 00:04
9 0.512096 0.510819 0.910307 00:04
10 0.510519 0.510142 0.911039 00:04
11 0.507621 0.506216 0.911490 00:04
12 0.502458 0.502422 0.911474 00:04
13 0.507862 0.502164 0.911498 00:04
14 0.504194 0.500906 0.911524 00:04
15 0.505379 0.499818 0.911503 00:04
16 0.502710 0.501870 0.910240 00:04
17 0.498563 0.500947 0.910271 00:04
18 0.498853 0.500783 0.911421 00:04
19 0.500933 0.498495 0.911500 00:04
20 0.504128 0.498253 0.911436 00:04
21 0.502040 0.498109 0.911511 00:04
22 0.502795 0.500456 0.909546 00:04
23 0.493911 0.497386 0.911496 00:04
24 0.496243 0.497873 0.911498 00:04
25 0.499255 0.496821 0.911509 00:04
26 0.497802 0.496308 0.911509 00:04
27 0.499468 0.496156 0.911526 00:04
28 0.497907 0.496381 0.911527 00:04
29 0.498290 0.496757 0.911522 00:04
30 0.499554 0.496056 0.911530 00:04
31 0.498391 0.495882 0.911521 00:04
32 0.497052 0.495372 0.911521 00:04
33 0.497853 0.495620 0.911519 00:04
34 0.493121 0.495398 0.911532 00:04
35 0.499827 0.495981 0.910530 00:04
36 0.498413 0.495287 0.911529 00:04
37 0.497525 0.495444 0.911535 00:04
38 0.500180 0.495049 0.911537 00:04
39 0.495193 0.495108 0.911540 00:04
40 0.491059 0.494918 0.911521 00:04
41 0.495378 0.494890 0.911526 00:04
42 0.493635 0.494944 0.911540 00:04
43 0.496340 0.494892 0.911521 00:04
44 0.497452 0.494766 0.911521 00:04
45 0.490121 0.494665 0.911521 00:04
46 0.494310 0.494636 0.911532 00:04
47 0.492629 0.494602 0.911521 00:04
48 0.491547 0.494637 0.911535 00:04
49 0.493652 0.494587 0.911537 00:04
50 0.498279 0.494578 0.911530 00:04
Paths:  19283 , Tokens:  4508
[108, 385, 419, 1116, 2174, 2396, 2751, 2869, 3200, 3409, 3724, 3890, 4023, 4174, 4374, 4411, 4425]
Adj. Rand Index Score: 0.758399.
Adj. Mutual Info Score: 0.777229.
Classes:  [0. 1. 2. 3.] n_clusters:  4.0
Seed:  43210
epoch train_loss valid_loss accuracy time
1 6.383917 5.873903 0.460174 00:04
2 3.099858 2.662646 0.634549 00:04
3 1.569690 1.382594 0.787370 00:04
4 0.884837 0.791723 0.871468 00:04
5 0.627135 0.592716 0.901293 00:04
6 0.551325 0.539741 0.902105 00:04
7 0.521261 0.520157 0.909692 00:04
8 0.514572 0.523708 0.911399 00:04
9 0.513539 0.521080 0.911262 00:04
10 0.505717 0.509300 0.911239 00:04
11 0.510449 0.509232 0.910779 00:04
12 0.511240 0.506448 0.910454 00:04
13 0.508134 0.505376 0.911480 00:04
14 0.507567 0.502820 0.911446 00:04
15 0.502476 0.500838 0.911519 00:04
16 0.507603 0.501051 0.911498 00:04
17 0.503874 0.500283 0.911483 00:04
18 0.500898 0.498940 0.910507 00:04
19 0.503097 0.499592 0.911524 00:04
20 0.502370 0.500871 0.911503 00:04
21 0.504610 0.498710 0.911535 00:04
22 0.504093 0.498084 0.911547 00:04
23 0.502213 0.497456 0.911535 00:04
24 0.500887 0.496848 0.911529 00:04
25 0.502352 0.497671 0.911519 00:04
26 0.497999 0.497586 0.910917 00:04
27 0.500139 0.496353 0.911524 00:04
28 0.497663 0.497160 0.911475 00:04
29 0.500892 0.497247 0.911532 00:04
30 0.501002 0.496934 0.911521 00:04
31 0.501793 0.496621 0.911521 00:04
32 0.492742 0.496079 0.911532 00:04
33 0.501703 0.496253 0.911537 00:04
34 0.499625 0.495944 0.911519 00:04
35 0.503153 0.495715 0.911524 00:04
36 0.498066 0.495510 0.911532 00:04
37 0.497141 0.495578 0.911535 00:04
38 0.490588 0.495207 0.911532 00:04
39 0.502271 0.495390 0.911521 00:04
40 0.493533 0.495351 0.911537 00:04
41 0.497653 0.495273 0.911524 00:04
42 0.494762 0.494964 0.911519 00:04
43 0.492403 0.494861 0.911516 00:04
44 0.492789 0.494870 0.911524 00:04
45 0.493719 0.494793 0.911521 00:04
46 0.495700 0.494722 0.911527 00:04
47 0.489450 0.494644 0.911519 00:04
48 0.493171 0.494657 0.911521 00:04
49 0.496041 0.494632 0.911524 00:04
50 0.494040 0.494624 0.911535 00:04
Paths:  19283 , Tokens:  4508
[108, 385, 419, 1116, 2174, 2396, 2751, 2869, 3200, 3409, 3724, 3890, 4023, 4174, 4374, 4411, 4425]
Adj. Rand Index Score: 0.758399.
Adj. Mutual Info Score: 0.777229.
Classes:  [0. 1. 2. 3.] n_clusters:  4.0
Seed:  1133557799
epoch train_loss valid_loss accuracy time
1 6.311153 5.747099 0.447690 00:04
2 2.998718 2.579906 0.646626 00:04
3 1.520858 1.355617 0.792829 00:04
4 0.875741 0.788369 0.855256 00:04
5 0.627352 0.589015 0.893882 00:04
6 0.543193 0.527139 0.907644 00:04
7 0.524663 0.519561 0.911119 00:04
8 0.512565 0.513229 0.911443 00:04
9 0.512188 0.513094 0.902702 00:04
10 0.513148 0.509940 0.910058 00:04
11 0.512527 0.507598 0.911480 00:04
12 0.508323 0.508421 0.910707 00:04
13 0.504349 0.507575 0.908928 00:04
14 0.507052 0.502652 0.911218 00:04
15 0.506710 0.500758 0.911470 00:04
16 0.506915 0.500305 0.910504 00:04
17 0.503481 0.499972 0.911478 00:04
18 0.505662 0.501456 0.911457 00:04
19 0.503794 0.500600 0.911499 00:04
20 0.501380 0.498069 0.911503 00:04
21 0.501961 0.498487 0.911542 00:04
22 0.500599 0.497959 0.911529 00:04
23 0.503620 0.497950 0.911519 00:04
24 0.499947 0.499251 0.911537 00:04
25 0.498849 0.496620 0.911485 00:04
26 0.495410 0.497012 0.911500 00:04
27 0.498994 0.497425 0.911498 00:04
28 0.502690 0.497362 0.911548 00:04
29 0.501066 0.497038 0.911530 00:04
30 0.502619 0.495970 0.911524 00:04
31 0.498425 0.496229 0.911530 00:04
32 0.497635 0.495801 0.911516 00:04
33 0.496855 0.495882 0.911519 00:04
34 0.493527 0.495332 0.911524 00:04
35 0.497261 0.495437 0.911526 00:04
36 0.495499 0.495750 0.911527 00:04
37 0.491031 0.495225 0.911521 00:04
38 0.497696 0.495309 0.911519 00:04
39 0.496029 0.495138 0.911519 00:04
40 0.488730 0.495144 0.911524 00:04
41 0.495351 0.494999 0.911521 00:04
42 0.490797 0.494933 0.911532 00:04
43 0.495209 0.494900 0.911516 00:04
44 0.495601 0.494812 0.911530 00:04
45 0.494981 0.494676 0.911537 00:04
46 0.496601 0.494672 0.911526 00:04
47 0.496476 0.494642 0.911519 00:04
48 0.499636 0.494626 0.911524 00:04
49 0.499416 0.494625 0.911521 00:04
50 0.496098 0.494620 0.911532 00:04
Paths:  19283 , Tokens:  4508
[108, 385, 419, 1116, 2174, 2396, 2751, 2869, 3200, 3409, 3724, 3890, 4023, 4174, 4374, 4411, 4425]
0
Adj. Rand Index Score: 0.758399.
Adj. Mutual Info Score: 0.777229.
Classes:  [0. 1. 2. 3.] n_clusters:  4.0
Seed:  22446688
epoch train_loss valid_loss accuracy time
1 6.511774 5.994586 0.428265 00:04
2 3.185093 2.750033 0.630586 00:04
3 1.596701 1.415010 0.783755 00:04
4 0.891688 0.802989 0.869640 00:04
5 0.636907 0.592670 0.900119 00:04
6 0.551003 0.530685 0.906452 00:04
7 0.527371 0.525561 0.903891 00:04
8 0.516031 0.518288 0.911324 00:04
9 0.515622 0.511869 0.911311 00:04
10 0.512435 0.510486 0.903945 00:04
11 0.506699 0.508109 0.911485 00:04
12 0.508414 0.505528 0.909097 00:04
13 0.506379 0.504274 0.910380 00:04
14 0.507715 0.504565 0.910499 00:04
15 0.506769 0.501428 0.911501 00:04
16 0.501007 0.501759 0.910564 00:04
17 0.500529 0.500942 0.911490 00:04
18 0.504624 0.498807 0.911524 00:04
19 0.501568 0.499588 0.911256 00:04
20 0.504967 0.498198 0.911488 00:04
21 0.500730 0.498571 0.911532 00:04
22 0.499236 0.498901 0.911545 00:04
23 0.498679 0.497327 0.911519 00:04
24 0.496717 0.497421 0.911521 00:04
25 0.503022 0.497200 0.911524 00:04
26 0.497874 0.497318 0.911542 00:04
27 0.500350 0.497494 0.911545 00:04
28 0.496128 0.496890 0.911522 00:04
29 0.500126 0.496784 0.911163 00:04
30 0.503530 0.496351 0.911537 00:04
31 0.493742 0.496357 0.911532 00:04
32 0.499944 0.496454 0.911537 00:04
33 0.500106 0.496229 0.911530 00:04
34 0.498798 0.496075 0.911516 00:04
35 0.492264 0.495740 0.911513 00:04
36 0.499939 0.495582 0.911535 00:04
37 0.488657 0.495423 0.911537 00:04
38 0.499790 0.495523 0.911516 00:04
39 0.494697 0.495349 0.911530 00:04
40 0.498539 0.495046 0.911516 00:04
41 0.496183 0.495187 0.911535 00:04
42 0.497381 0.495025 0.911519 00:04
43 0.494012 0.494893 0.911527 00:04
44 0.492214 0.494896 0.911535 00:04
45 0.493458 0.494823 0.911521 00:04
46 0.496006 0.494654 0.911529 00:04
47 0.492867 0.494635 0.911524 00:04
48 0.491724 0.494635 0.911527 00:04
49 0.490339 0.494609 0.911531 00:04
50 0.492919 0.494620 0.911529 00:04
Paths:  19283 , Tokens:  4508
[108, 385, 419, 1116, 2174, 2396, 2751, 2869, 3200, 3409, 3724, 3890, 4023, 4174, 4374, 4411, 4425]
Adj. Rand Index Score: 0.758399.
Adj. Mutual Info Score: 0.777229.
Classes:  [0. 1. 2. 3.] n_clusters:  4.0
Seed:  123456789
epoch train_loss valid_loss accuracy time
1 6.222045 5.665717 0.468234 00:04
2 2.952319 2.553141 0.650452 00:04
3 1.514865 1.351199 0.792338 00:04
4 0.882625 0.789553 0.871102 00:04
5 0.623838 0.589360 0.899237 00:04
6 0.541614 0.530220 0.909515 00:04
7 0.528405 0.523883 0.910423 00:04
8 0.518249 0.517329 0.910678 00:04
9 0.513210 0.517580 0.909727 00:04
10 0.516828 0.512543 0.911475 00:04
11 0.512313 0.509546 0.910992 00:04
12 0.505042 0.509184 0.910577 00:04
13 0.508389 0.505775 0.910588 00:04
14 0.511125 0.503854 0.911485 00:04
15 0.506155 0.500595 0.911519 00:04
16 0.501599 0.502936 0.911467 00:04
17 0.500959 0.500204 0.911514 00:04
18 0.501596 0.499840 0.910795 00:04
19 0.503404 0.498786 0.911514 00:04
20 0.497241 0.499719 0.911522 00:04
21 0.503550 0.498048 0.910720 00:04
22 0.501186 0.497609 0.911514 00:04
23 0.501410 0.498402 0.911472 00:04
24 0.501594 0.496718 0.911503 00:04
25 0.499830 0.495990 0.911503 00:04
26 0.497772 0.495761 0.911503 00:04
27 0.503221 0.495784 0.911511 00:04
28 0.497903 0.495924 0.911519 00:04
29 0.495152 0.496002 0.911530 00:04
30 0.503613 0.496427 0.911521 00:04
31 0.498641 0.495514 0.911530 00:04
32 0.493836 0.495975 0.911519 00:04
33 0.502300 0.495811 0.911530 00:04
34 0.500632 0.495052 0.911524 00:04
35 0.501511 0.495604 0.911516 00:04
36 0.495171 0.495233 0.911511 00:04
37 0.498390 0.495163 0.911527 00:04
38 0.493788 0.494944 0.911516 00:04
39 0.495449 0.495220 0.911519 00:04
40 0.496540 0.494918 0.911540 00:04
41 0.501158 0.494840 0.911532 00:04
42 0.495186 0.494740 0.911532 00:04
43 0.496792 0.494763 0.911524 00:04
44 0.499451 0.494719 0.911519 00:04
45 0.494483 0.494676 0.911530 00:04
46 0.494464 0.494625 0.911530 00:04
47 0.483798 0.494629 0.911543 00:04
48 0.495507 0.494588 0.911532 00:04
49 0.493002 0.494581 0.911527 00:04
50 0.497355 0.494604 0.911526 00:04
Paths:  19283 , Tokens:  4508
[108, 385, 419, 1116, 2174, 2396, 2751, 2869, 3200, 3409, 3724, 3890, 4023, 4174, 4374, 4411, 4425]
0
0
Adj. Rand Index Score: 0.758399.
Adj. Mutual Info Score: 0.777229.
Classes:  [0. 1. 2. 3.] n_clusters:  4.0
Seed:  987654321
epoch train_loss valid_loss accuracy time
1 6.633595 6.114462 0.393815 00:04
2 3.344829 2.904063 0.605038 00:04
3 1.679756 1.483198 0.767543 00:04
4 0.925486 0.823934 0.870518 00:04
5 0.641597 0.605174 0.900394 00:04
6 0.554788 0.537200 0.909022 00:04
7 0.528715 0.524172 0.910930 00:04
8 0.515512 0.525041 0.905949 00:04
9 0.513710 0.514296 0.911286 00:04
10 0.515286 0.511148 0.910252 00:04
11 0.514642 0.513725 0.910756 00:04
12 0.509878 0.508258 0.910029 00:04
13 0.515179 0.505029 0.911400 00:04
14 0.504871 0.502847 0.910904 00:04
15 0.504528 0.501146 0.911501 00:04
16 0.503243 0.502557 0.911511 00:04
17 0.506470 0.500646 0.911485 00:04
18 0.501773 0.499622 0.911420 00:04
19 0.504406 0.497681 0.911506 00:04
20 0.500780 0.497802 0.911495 00:04
21 0.503332 0.497699 0.911506 00:04
22 0.506872 0.497552 0.911485 00:04
23 0.502478 0.496884 0.911501 00:04
24 0.496347 0.497109 0.911496 00:04
25 0.500915 0.496783 0.911532 00:04
26 0.505084 0.496379 0.911530 00:04
27 0.503153 0.496572 0.911506 00:04
28 0.497232 0.495979 0.911534 00:04
29 0.497594 0.496419 0.911542 00:04
30 0.498555 0.496111 0.911532 00:04
31 0.499236 0.495608 0.911521 00:04
32 0.495691 0.496056 0.911521 00:04
33 0.501407 0.495284 0.911527 00:04
34 0.498683 0.495567 0.911530 00:04
35 0.496337 0.495497 0.911524 00:04
36 0.500277 0.495079 0.911535 00:04
37 0.495755 0.495099 0.911529 00:04
38 0.497741 0.495193 0.911527 00:04
39 0.492876 0.494976 0.911540 00:04
40 0.493352 0.494907 0.911524 00:04
41 0.497051 0.495092 0.911516 00:04
42 0.497671 0.494935 0.911521 00:04
43 0.498402 0.494793 0.911513 00:04
44 0.495919 0.494765 0.911535 00:04
45 0.498595 0.494763 0.911524 00:04
46 0.493670 0.494688 0.911532 00:04
47 0.490236 0.494672 0.911519 00:04
48 0.491007 0.494629 0.911516 00:04
49 0.493050 0.494604 0.911537 00:04
50 0.494359 0.494629 0.911521 00:04
Paths:  19283 , Tokens:  4508
[108, 385, 419, 1116, 2174, 2396, 2751, 2869, 3200, 3409, 3724, 3890, 4023, 4174, 4374, 4411, 4425]
Adj. Rand Index Score: 0.758399.
Adj. Mutual Info Score: 0.777229.
Classes:  [0. 1. 2. 3.] n_clusters:  4.0
Seed:  86420
epoch train_loss valid_loss accuracy time
1 6.302538 5.741844 0.438258 00:04
2 2.993598 2.592113 0.643428 00:04
3 1.531483 1.361758 0.789979 00:04
4 0.875604 0.798746 0.868944 00:04
5 0.622038 0.590791 0.895299 00:04
6 0.546139 0.533619 0.906899 00:04
7 0.524033 0.522425 0.909816 00:04
8 0.521794 0.515130 0.910706 00:04
9 0.512706 0.515094 0.911259 00:04
10 0.518354 0.510153 0.911374 00:04
11 0.511431 0.511053 0.910678 00:04
12 0.513789 0.513643 0.911464 00:04
13 0.506143 0.505171 0.910982 00:04
14 0.505786 0.503832 0.911485 00:04
15 0.503751 0.505079 0.910994 00:04
16 0.505337 0.500077 0.911516 00:04
17 0.505150 0.500869 0.911459 00:04
18 0.502430 0.499489 0.911475 00:04
19 0.502984 0.500156 0.911482 00:04
20 0.497248 0.498815 0.911532 00:04
21 0.502799 0.497870 0.911524 00:04
22 0.495194 0.498696 0.911511 00:04
23 0.499395 0.497675 0.911522 00:04
24 0.500061 0.500077 0.906694 00:04
25 0.502542 0.498088 0.910623 00:04
26 0.495832 0.497823 0.911516 00:04
27 0.500248 0.497109 0.911524 00:04
28 0.498383 0.497358 0.911522 00:04
29 0.495843 0.496668 0.911529 00:04
30 0.500051 0.497112 0.911483 00:04
31 0.499631 0.496208 0.911527 00:04
32 0.497065 0.496149 0.911532 00:04
33 0.498805 0.496326 0.911532 00:04
34 0.496551 0.495791 0.911516 00:04
35 0.498496 0.495633 0.911531 00:04
36 0.495710 0.496222 0.911516 00:04
37 0.497975 0.495965 0.911521 00:04
38 0.496003 0.495376 0.911534 00:04
39 0.492734 0.495478 0.911524 00:04
40 0.497143 0.495411 0.911535 00:04
41 0.499048 0.495068 0.911519 00:04
42 0.493998 0.494930 0.911522 00:04
43 0.495525 0.494851 0.911516 00:04
44 0.494352 0.494836 0.911535 00:04
45 0.492804 0.494738 0.911530 00:04
46 0.494526 0.494700 0.911535 00:04
47 0.491948 0.494647 0.911521 00:04
48 0.494768 0.494656 0.911521 00:04
49 0.489734 0.494631 0.911526 00:04
50 0.492633 0.494621 0.911526 00:04
Paths:  19283 , Tokens:  4508
[108, 385, 419, 1116, 2174, 2396, 2751, 2869, 3200, 3409, 3724, 3890, 4023, 4174, 4374, 4411, 4425]
Adj. Rand Index Score: 0.758399.
Adj. Mutual Info Score: 0.777229.
Classes:  [0. 1. 2. 3.] n_clusters:  4.0


File Name to Train:  data_sets/02_bunch/page-blocks/page-blocks.csv
#############################################################
HDBSCAN
#############################################################
Adj. Rand Index Score: 0.577030.
Adj. Mutual Info Score: 0.354158.
Classes:  [-1  0  1] n_clusters:  2
QSHIFTPP
#############################################################
Adj. Rand Index Score: 0.027815.
Adj. Mutual Info Score: 0.089215.
Classes:  [ 0  1  2  3  4  5  6  7  8  9 10 11 12 13 14 15 16 17 18] n_clusters:  19
HQSHIFT
#############################################################
Adj. Rand Index Score: 0.577030.
Adj. Mutual Info Score: 0.354158.
Classes:  [-1  0  1] n_clusters:  2
Seed:  0
epoch train_loss valid_loss accuracy time
1 3.686644 3.363606 0.563756 00:10
2 1.346486 1.223939 0.821244 00:09
3 0.638392 0.600439 0.905686 00:09
4 0.473369 0.454292 0.923814 00:09
5 0.426099 0.429826 0.917858 00:09
6 0.417381 0.413846 0.925422 00:09
7 0.417179 0.412587 0.928668 00:09
8 0.411026 0.407863 0.930110 00:09
9 0.407079 0.415560 0.917235 00:09
10 0.411874 0.405438 0.930037 00:09
11 0.407186 0.406324 0.929464 00:09
12 0.405732 0.401726 0.930107 00:09
13 0.411143 0.400720 0.930220 00:09
14 0.407308 0.400978 0.930220 00:09
15 0.404414 0.401553 0.930228 00:09
16 0.408589 0.401595 0.930256 00:09
17 0.407007 0.399855 0.930299 00:09
18 0.405717 0.401294 0.929829 00:09
19 0.403317 0.398553 0.930272 00:09
20 0.404767 0.399100 0.930216 00:09
21 0.398168 0.399212 0.929987 00:09
22 0.401089 0.398242 0.930326 00:09
23 0.407235 0.400158 0.930300 00:09
24 0.407258 0.397767 0.930324 00:09
25 0.405954 0.398194 0.930319 00:09
26 0.399673 0.397791 0.930311 00:09
27 0.400046 0.398039 0.930325 00:09
28 0.405928 0.397463 0.930318 00:09
29 0.398773 0.397608 0.930326 00:09
30 0.397932 0.397188 0.930339 00:09
31 0.400666 0.397058 0.930326 00:09
32 0.398488 0.396859 0.930352 00:09
33 0.396110 0.396837 0.930353 00:09
34 0.397339 0.396588 0.930353 00:09
35 0.399661 0.396577 0.930353 00:09
36 0.393867 0.396584 0.930353 00:09
37 0.399414 0.396427 0.930352 00:09
38 0.401238 0.396704 0.930350 00:09
39 0.394030 0.396348 0.930353 00:09
40 0.395906 0.396199 0.930353 00:09
41 0.394254 0.396236 0.930353 00:09
42 0.398252 0.396147 0.930352 00:09
43 0.397729 0.396204 0.930353 00:09
44 0.397457 0.396094 0.930353 00:09
45 0.397565 0.396119 0.930350 00:09
46 0.396101 0.396055 0.930353 00:09
47 0.403096 0.395959 0.930355 00:09
48 0.392441 0.395943 0.930352 00:09
49 0.397227 0.395931 0.930355 00:09
50 0.393640 0.395953 0.930351 00:09
/home/can/anaconda3/envs/hqshift/lib/python3.6/site-packages/fastai/datasets.py:153: YAMLLoadWarning: calling yaml.load() without Loader=... is deprecated, as the default Loader is unsafe. Please read https://msg.pyyaml.org/load for full details.
  with open(fpath, 'r') as yaml_file: return yaml.load(yaml_file)
Paths:  31223 , Tokens:  5473
[172, 372, 527, 1718, 1828, 2132, 2229, 2312, 2424, 2454, 2462, 2633, 3129, 3159, 3276, 3356, 3417, 3637, 3692, 4288, 4772, 5216, 5364, 5384, 5463]
0
Adj. Rand Index Score: 0.569727.
Adj. Mutual Info Score: 0.300430.
Classes:  [-1.  0.  1.] n_clusters:  2.0
Seed:  23
epoch train_loss valid_loss accuracy time
1 3.581009 3.250535 0.563903 00:09
2 1.319307 1.209390 0.820916 00:09
3 0.637267 0.595163 0.905516 00:09
4 0.469070 0.452309 0.924134 00:09
5 0.430157 0.423031 0.926242 00:09
6 0.418012 0.411446 0.930007 00:09
7 0.413057 0.408487 0.926215 00:09
8 0.411068 0.408577 0.929177 00:09
9 0.408969 0.410382 0.929144 00:09
10 0.408813 0.401910 0.930273 00:09
11 0.409367 0.401091 0.930102 00:09
12 0.406928 0.402491 0.930140 00:09
13 0.405388 0.402041 0.930093 00:09
14 0.408120 0.399486 0.930227 00:09
15 0.409074 0.398841 0.930236 00:09
16 0.403646 0.399961 0.928994 00:09
17 0.404421 0.398947 0.930174 00:09
18 0.398311 0.399399 0.929462 00:09
19 0.401323 0.398222 0.930287 00:09
20 0.401982 0.398856 0.930222 00:10
21 0.401278 0.398304 0.930215 00:10
22 0.401317 0.398455 0.930247 00:09
23 0.406155 0.398420 0.930283 00:09
24 0.401173 0.397490 0.930299 00:10
25 0.401689 0.397709 0.930301 00:10
26 0.398644 0.397662 0.930337 00:09
27 0.404593 0.397723 0.930327 00:09
28 0.396153 0.397125 0.930328 00:10
29 0.401734 0.397255 0.930333 00:10
30 0.399990 0.397460 0.930349 00:09
31 0.395851 0.396828 0.930352 00:09
32 0.398350 0.396904 0.930345 00:09
33 0.392483 0.397302 0.930353 00:09
34 0.396695 0.396433 0.930351 00:09
35 0.401899 0.396897 0.930353 00:10
36 0.394460 0.396639 0.930351 00:10
37 0.398426 0.396491 0.930353 00:10
38 0.395278 0.396405 0.930353 00:09
39 0.396054 0.396212 0.930355 00:09
40 0.397802 0.396831 0.930355 00:09
41 0.397342 0.396381 0.930353 00:09
42 0.394825 0.396093 0.930353 00:09
43 0.394430 0.396009 0.930353 00:09
44 0.393007 0.396109 0.930353 00:09
45 0.396442 0.395979 0.930357 00:09
46 0.393430 0.395946 0.930355 00:09
47 0.397666 0.395946 0.930355 00:09
48 0.390603 0.395927 0.930353 00:09
49 0.392032 0.395919 0.930354 00:09
50 0.395708 0.395914 0.930352 00:09
Paths:  31223 , Tokens:  5473
[172, 372, 527, 1718, 1828, 2132, 2229, 2312, 2424, 2454, 2462, 2633, 3129, 3159, 3276, 3356, 3417, 3637, 3692, 4288, 4772, 5216, 5364, 5384, 5463]
0
Adj. Rand Index Score: 0.569727.
Adj. Mutual Info Score: 0.300430.
Classes:  [-1.  0.  1.] n_clusters:  2.0
Seed:  42
epoch train_loss valid_loss accuracy time
1 3.747041 3.405118 0.538893 00:09
2 1.345351 1.257935 0.815496 00:09
3 0.632773 0.601827 0.903744 00:09
4 0.466711 0.458633 0.922742 00:09
5 0.425034 0.425849 0.928829 00:09
6 0.419015 0.415767 0.929088 00:09
7 0.411803 0.412442 0.928913 00:09
8 0.405127 0.411244 0.928953 00:09
9 0.409721 0.408889 0.930179 00:09
10 0.406655 0.404699 0.930185 00:09
11 0.409478 0.406254 0.928992 00:09
12 0.406055 0.401967 0.930203 00:09
13 0.407355 0.403592 0.930157 00:09
14 0.405571 0.400504 0.930172 00:09
15 0.405317 0.400892 0.930259 00:09
16 0.403486 0.400841 0.929857 00:09
17 0.397982 0.400714 0.930311 00:09
18 0.400553 0.401089 0.930243 00:09
19 0.402967 0.400583 0.930300 00:09
20 0.404325 0.399072 0.930318 00:09
21 0.408692 0.400002 0.930267 00:09
22 0.400300 0.399134 0.930323 00:09
23 0.404786 0.398896 0.929409 00:09
24 0.408506 0.398851 0.930297 00:09
25 0.401556 0.398864 0.930063 00:09
26 0.398147 0.398634 0.930261 00:09
27 0.398104 0.398608 0.930329 00:09
28 0.401491 0.398112 0.930289 00:09
29 0.403126 0.397894 0.930342 00:09
30 0.399383 0.397610 0.930208 00:09
31 0.397273 0.397958 0.930345 00:09
32 0.404442 0.397784 0.930014 00:09
33 0.399758 0.397757 0.930347 00:09
34 0.398927 0.397531 0.930331 00:09
35 0.395823 0.397142 0.930345 00:09
36 0.396322 0.397031 0.930345 00:09
37 0.399595 0.397273 0.930352 00:09
38 0.398240 0.396812 0.930346 00:09
39 0.399894 0.396661 0.930346 00:09
40 0.393927 0.396846 0.930343 00:09
41 0.398072 0.396492 0.930344 00:09
42 0.397349 0.396330 0.930348 00:09
43 0.392435 0.396209 0.930347 00:09
44 0.393428 0.396266 0.930352 00:09
45 0.393112 0.396055 0.930349 00:09
46 0.394403 0.396057 0.930353 00:09
47 0.396006 0.396049 0.930348 00:09
48 0.396428 0.395984 0.930349 00:09
49 0.392737 0.396032 0.930349 00:09
50 0.390766 0.395970 0.930355 00:09
Paths:  31223 , Tokens:  5473
[172, 372, 527, 1718, 1828, 2132, 2229, 2312, 2424, 2454, 2462, 2633, 3129, 3159, 3276, 3356, 3417, 3637, 3692, 4288, 4772, 5216, 5364, 5384, 5463]
0
Adj. Rand Index Score: 0.569727.
Adj. Mutual Info Score: 0.300430.
Classes:  [-1.  0.  1.] n_clusters:  2.0
Seed:  1234
epoch train_loss valid_loss accuracy time
1 3.955576 3.543739 0.541438 00:09
2 1.375864 1.268803 0.813403 00:09
3 0.657414 0.606933 0.903389 00:09
4 0.470893 0.461481 0.924125 00:09
5 0.428709 0.421482 0.928017 00:09
6 0.412500 0.413970 0.929547 00:09
7 0.416637 0.408018 0.928998 00:09
8 0.410530 0.408382 0.930247 00:09
9 0.409068 0.406541 0.928520 00:09
10 0.404928 0.403036 0.929928 00:09
11 0.405745 0.403237 0.929478 00:09
12 0.409469 0.402559 0.930225 00:09
13 0.413968 0.402960 0.930256 00:09
14 0.407376 0.403238 0.930177 00:09
15 0.404054 0.401001 0.930276 00:09
16 0.403921 0.399826 0.930313 00:09
17 0.404355 0.399942 0.930281 00:09
18 0.409410 0.399009 0.930304 00:09
19 0.399695 0.399239 0.929474 00:09
20 0.399143 0.399001 0.930271 00:09
21 0.401163 0.398116 0.930286 00:09
22 0.409228 0.398133 0.929979 00:09
23 0.401563 0.397835 0.930309 00:09
24 0.400331 0.397460 0.930305 00:09
25 0.396957 0.397973 0.930330 00:09
26 0.401001 0.396982 0.930304 00:09
27 0.404779 0.397510 0.929574 00:09
28 0.404817 0.397403 0.930321 00:09
29 0.399288 0.397185 0.930342 00:09
30 0.397734 0.397119 0.930345 00:09
31 0.400806 0.397100 0.930328 00:09
32 0.397860 0.396780 0.930350 00:09
33 0.400571 0.397009 0.930353 00:09
34 0.395634 0.396790 0.930353 00:09
35 0.402102 0.396581 0.930353 00:09
36 0.395579 0.396639 0.930352 00:09
37 0.403282 0.396475 0.930351 00:09
38 0.400568 0.396519 0.930348 00:09
39 0.397685 0.396328 0.930353 00:09
40 0.395213 0.396463 0.930350 00:09
41 0.395856 0.396187 0.930351 00:09
42 0.392882 0.396166 0.930350 00:09
43 0.399172 0.396080 0.930352 00:09
44 0.393774 0.396010 0.930351 00:09
45 0.389217 0.395970 0.930350 00:09
46 0.398488 0.395966 0.930349 00:09
47 0.394791 0.395915 0.930353 00:09
48 0.398387 0.395905 0.930353 00:09
49 0.393246 0.395885 0.930353 00:09
50 0.389657 0.395916 0.930352 00:09
Paths:  31223 , Tokens:  5473
[172, 372, 527, 1718, 1828, 2132, 2229, 2312, 2424, 2454, 2462, 2633, 3129, 3159, 3276, 3356, 3417, 3637, 3692, 4288, 4772, 5216, 5364, 5384, 5463]
Adj. Rand Index Score: 0.569727.
Adj. Mutual Info Score: 0.300430.
Classes:  [-1.  0.  1.] n_clusters:  2.0
Seed:  43210
epoch train_loss valid_loss accuracy time
1 3.969830 3.616910 0.528859 00:09
2 1.415752 1.292803 0.814208 00:09
3 0.651859 0.614602 0.896366 00:09
4 0.467375 0.459766 0.923282 00:09
5 0.431857 0.423523 0.924625 00:09
6 0.418416 0.426220 0.925542 00:09
7 0.412086 0.411762 0.929347 00:09
8 0.414202 0.409225 0.928541 00:09
9 0.409847 0.406559 0.930216 00:09
10 0.409526 0.404408 0.929850 00:09
11 0.408191 0.403464 0.929811 00:09
12 0.403246 0.402874 0.930092 00:09
13 0.408494 0.402401 0.930205 00:09
14 0.404843 0.401821 0.930253 00:09
15 0.404738 0.399999 0.930242 00:09
16 0.400047 0.400391 0.929879 00:09
17 0.403781 0.398944 0.930288 00:09
18 0.403707 0.400968 0.930283 00:09
19 0.401486 0.399192 0.929928 00:09
20 0.400401 0.399083 0.930287 00:09
21 0.404893 0.399599 0.930264 00:09
22 0.403688 0.399645 0.930317 00:09
23 0.404202 0.398884 0.929644 00:09
24 0.401563 0.399119 0.930332 00:09
25 0.401432 0.398775 0.930330 00:10
26 0.400448 0.398259 0.930335 00:10
27 0.404644 0.398628 0.930337 00:10
28 0.404084 0.398053 0.930351 00:10
29 0.401928 0.397155 0.930342 00:10
30 0.399436 0.397639 0.930319 00:09
31 0.403638 0.398152 0.930348 00:10
32 0.400396 0.397541 0.930346 00:10
33 0.401625 0.397441 0.930017 00:10
34 0.398154 0.396943 0.930350 00:10
35 0.399340 0.396889 0.930347 00:10
36 0.400483 0.396742 0.930348 00:10
37 0.394507 0.396769 0.930350 00:10
38 0.396356 0.396853 0.930347 00:09
39 0.394833 0.396460 0.930350 00:09
40 0.393769 0.396449 0.930351 00:09
41 0.392062 0.396411 0.930351 00:09
42 0.395691 0.396372 0.930352 00:09
43 0.399609 0.396257 0.930349 00:09
44 0.400447 0.396134 0.930350 00:09
45 0.398186 0.396179 0.930353 00:09
46 0.393339 0.396103 0.930350 00:09
47 0.403839 0.396046 0.930353 00:09
48 0.394197 0.396004 0.930349 00:09
49 0.392453 0.396014 0.930350 00:09
50 0.396470 0.395998 0.930348 00:09
Paths:  31223 , Tokens:  5473
[172, 372, 527, 1718, 1828, 2132, 2229, 2312, 2424, 2454, 2462, 2633, 3129, 3159, 3276, 3356, 3417, 3637, 3692, 4288, 4772, 5216, 5364, 5384, 5463]
Adj. Rand Index Score: 0.569727.
Adj. Mutual Info Score: 0.300430.
Classes:  [-1.  0.  1.] n_clusters:  2.0
Seed:  1133557799
epoch train_loss valid_loss accuracy time
1 3.758229 3.451324 0.543794 00:09
2 1.356448 1.244827 0.816301 00:09
3 0.650908 0.607050 0.903311 00:09
4 0.476285 0.460758 0.920698 00:09
5 0.429166 0.422578 0.929007 00:09
6 0.417866 0.414729 0.927833 00:09
7 0.413264 0.412799 0.929823 00:09
8 0.408960 0.412702 0.924744 00:09
9 0.413946 0.407467 0.930207 00:09
10 0.404509 0.407888 0.928409 00:09
11 0.414432 0.404611 0.928425 00:09
12 0.403902 0.404222 0.929854 00:09
13 0.409069 0.403817 0.928266 00:09
14 0.405529 0.402663 0.930160 00:09
15 0.404397 0.401831 0.930272 00:09
16 0.406074 0.400647 0.930301 00:09
17 0.402768 0.400698 0.929909 00:09
18 0.406971 0.400490 0.930297 00:09
19 0.402641 0.399876 0.930333 00:09
20 0.398772 0.400484 0.929951 00:09
21 0.401360 0.399470 0.930329 00:09
22 0.405650 0.399821 0.930064 00:09
23 0.401064 0.399642 0.930355 00:09
24 0.396967 0.399789 0.930340 00:09
25 0.400057 0.398863 0.930351 00:09
26 0.404233 0.398678 0.930332 00:09
27 0.399324 0.398231 0.930056 00:09
28 0.397195 0.398073 0.930346 00:09
29 0.401801 0.398470 0.930347 00:09
30 0.404753 0.399515 0.929963 00:09
31 0.401645 0.398312 0.930341 00:09
32 0.403396 0.397705 0.930350 00:09
33 0.398958 0.398216 0.930345 00:09
34 0.396272 0.397117 0.930348 00:09
35 0.394135 0.397399 0.930345 00:09
36 0.400842 0.397611 0.930350 00:09
37 0.402905 0.397210 0.930353 00:09
38 0.398245 0.396772 0.930357 00:09
39 0.395253 0.396766 0.930350 00:09
40 0.396806 0.396573 0.930345 00:09
41 0.405389 0.396492 0.930353 00:09
42 0.396327 0.396423 0.930352 00:09
43 0.393731 0.396262 0.930355 00:09
44 0.400045 0.396202 0.930352 00:09
45 0.394206 0.396135 0.930352 00:09
46 0.397563 0.396090 0.930348 00:09
47 0.393073 0.396042 0.930348 00:09
48 0.396244 0.396006 0.930352 00:09
49 0.396160 0.396028 0.930352 00:09
50 0.398861 0.396010 0.930351 00:09
Paths:  31223 , Tokens:  5473
[172, 372, 527, 1718, 1828, 2132, 2229, 2312, 2424, 2454, 2462, 2633, 3129, 3159, 3276, 3356, 3417, 3637, 3692, 4288, 4772, 5216, 5364, 5384, 5463]
Adj. Rand Index Score: 0.569727.
Adj. Mutual Info Score: 0.300430.
Classes:  [-1.  0.  1.] n_clusters:  2.0
Seed:  22446688
epoch train_loss valid_loss accuracy time
1 3.950721 3.586070 0.533284 00:09
2 1.400505 1.292979 0.806957 00:09
3 0.644616 0.607404 0.903851 00:09
4 0.471762 0.460117 0.923063 00:09
5 0.429336 0.429920 0.920613 00:09
6 0.418526 0.415898 0.929184 00:09
7 0.407467 0.412339 0.929894 00:09
8 0.415542 0.408989 0.929180 00:09
9 0.409787 0.407607 0.925889 00:09
10 0.408631 0.406260 0.930196 00:09
11 0.407540 0.403992 0.930207 00:09
12 0.403463 0.401895 0.930237 00:09
13 0.403416 0.402106 0.930235 00:09
14 0.403207 0.399420 0.929888 00:09
15 0.403202 0.398353 0.930226 00:09
16 0.408362 0.398679 0.930262 00:09
17 0.405704 0.398710 0.930165 00:09
18 0.397833 0.398820 0.930166 00:09
19 0.399326 0.398184 0.930236 00:09
20 0.402291 0.398036 0.930215 00:09
21 0.403416 0.398303 0.930286 00:09
22 0.404276 0.397588 0.929944 00:09
23 0.402547 0.398258 0.929888 00:09
24 0.400621 0.397564 0.930327 00:09
25 0.398297 0.397535 0.929516 00:09
26 0.399137 0.397474 0.929934 00:09
27 0.401140 0.398190 0.930332 00:09
28 0.398566 0.397854 0.930343 00:09
29 0.399489 0.397192 0.930333 00:09
30 0.401829 0.397091 0.930351 00:09
31 0.402014 0.396850 0.930349 00:09
32 0.404854 0.396828 0.930349 00:09
33 0.398054 0.396530 0.930353 00:09
34 0.401072 0.396455 0.930352 00:09
35 0.393530 0.396674 0.930352 00:09
36 0.399352 0.396395 0.930352 00:09
37 0.402600 0.396591 0.930354 00:09
38 0.402396 0.396449 0.930355 00:09
39 0.397020 0.396363 0.930354 00:09
40 0.393973 0.396231 0.930355 00:09
41 0.396415 0.396184 0.930355 00:09
42 0.398730 0.396244 0.930355 00:09
43 0.394312 0.396242 0.930355 00:10
44 0.392848 0.396037 0.930354 00:10
45 0.397533 0.396035 0.930354 00:10
46 0.397655 0.395988 0.930355 00:10
47 0.398662 0.395930 0.930356 00:09
48 0.392300 0.395939 0.930354 00:09
49 0.397013 0.395914 0.930355 00:09
50 0.399568 0.395915 0.930355 00:09
Paths:  31223 , Tokens:  5473
[172, 372, 527, 1718, 1828, 2132, 2229, 2312, 2424, 2454, 2462, 2633, 3129, 3159, 3276, 3356, 3417, 3637, 3692, 4288, 4772, 5216, 5364, 5384, 5463]
Adj. Rand Index Score: 0.569727.
Adj. Mutual Info Score: 0.300430.
Classes:  [-1.  0.  1.] n_clusters:  2.0
Seed:  123456789
epoch train_loss valid_loss accuracy time
1 3.913286 3.570210 0.522966 00:09
2 1.416599 1.294890 0.808301 00:09
3 0.658353 0.611921 0.898794 00:09
4 0.477128 0.463345 0.918568 00:09
5 0.430998 0.419887 0.928739 00:09
6 0.421267 0.419643 0.928486 00:09
7 0.414414 0.409930 0.930174 00:09
8 0.415846 0.411123 0.929702 00:09
9 0.408587 0.404407 0.929204 00:09
10 0.413043 0.406583 0.929893 00:09
11 0.407560 0.402652 0.930266 00:09
12 0.401698 0.401754 0.929460 00:09
13 0.406592 0.400586 0.930255 00:09
14 0.405708 0.402277 0.930188 00:09
15 0.404554 0.400388 0.929917 00:09
16 0.407088 0.400641 0.929963 00:09
17 0.401854 0.400573 0.930254 00:09
18 0.403025 0.401318 0.930292 00:09
19 0.404471 0.398915 0.929995 00:09
20 0.396489 0.399079 0.930320 00:09
21 0.404421 0.399052 0.930260 00:09
22 0.407902 0.398580 0.930286 00:09
23 0.399452 0.398113 0.930309 00:09
24 0.400880 0.397605 0.930290 00:09
25 0.399469 0.397176 0.930342 00:09
26 0.404387 0.397249 0.930350 00:09
27 0.398339 0.397320 0.930321 00:09
28 0.401917 0.397239 0.930335 00:09
29 0.402540 0.397526 0.930349 00:09
30 0.397602 0.396979 0.930350 00:09
31 0.394009 0.396937 0.930323 00:09
32 0.398211 0.397526 0.930344 00:09
33 0.400237 0.396682 0.930348 00:09
34 0.401340 0.396932 0.930349 00:09
35 0.400947 0.397032 0.930353 00:09
36 0.393003 0.396697 0.930347 00:09
37 0.397889 0.396718 0.930354 00:09
38 0.400234 0.396711 0.930348 00:09
39 0.397443 0.396392 0.930351 00:09
40 0.395303 0.396462 0.930352 00:09
41 0.397605 0.396260 0.930351 00:09
42 0.397050 0.396086 0.930354 00:09
43 0.392269 0.396172 0.930349 00:09
44 0.397704 0.396053 0.930353 00:09
45 0.395013 0.396016 0.930355 00:09
46 0.385481 0.395987 0.930349 00:09
47 0.399581 0.395939 0.930354 00:09
48 0.390918 0.395935 0.930353 00:09
49 0.395118 0.395923 0.930351 00:09
50 0.398711 0.395948 0.930350 00:09
Paths:  31223 , Tokens:  5473
[172, 372, 527, 1718, 1828, 2132, 2229, 2312, 2424, 2454, 2462, 2633, 3129, 3159, 3276, 3356, 3417, 3637, 3692, 4288, 4772, 5216, 5364, 5384, 5463]
0
Adj. Rand Index Score: 0.569727.
Adj. Mutual Info Score: 0.300430.
Classes:  [-1.  0.  1.] n_clusters:  2.0
Seed:  987654321
epoch train_loss valid_loss accuracy time
1 3.759575 3.389054 0.553828 00:10
2 1.352957 1.234150 0.817174 00:10
3 0.633703 0.600160 0.904786 00:10
4 0.476021 0.459948 0.921992 00:10
5 0.433674 0.423544 0.928457 00:09
6 0.410566 0.414769 0.928627 00:10
7 0.407334 0.411227 0.929358 00:10
8 0.408181 0.407968 0.929059 00:10
9 0.409319 0.408619 0.929342 00:10
10 0.405687 0.405440 0.930211 00:10
11 0.403178 0.405788 0.930193 00:10
12 0.406559 0.405551 0.929746 00:10
13 0.413343 0.404008 0.928682 00:09
14 0.405924 0.400498 0.930256 00:09
15 0.409140 0.401298 0.930239 00:10
16 0.404126 0.400169 0.930198 00:10
17 0.403506 0.401031 0.930299 00:09
18 0.407528 0.400481 0.929108 00:10
19 0.401150 0.398690 0.930330 00:09
20 0.406784 0.399038 0.930332 00:09
21 0.406142 0.399514 0.930260 00:10
22 0.401166 0.400083 0.930336 00:10
23 0.398783 0.399292 0.929962 00:10
24 0.397950 0.399021 0.929936 00:09
25 0.397807 0.399108 0.930315 00:09
26 0.400141 0.397758 0.930346 00:10
27 0.401694 0.398107 0.930336 00:10
28 0.405423 0.398404 0.929967 00:09
29 0.407516 0.398005 0.930345 00:10
30 0.398708 0.398263 0.930342 00:10
31 0.402930 0.398033 0.930338 00:10
32 0.399294 0.397509 0.930342 00:10
33 0.398898 0.397511 0.930348 00:10
34 0.397666 0.397507 0.930344 00:09
35 0.401958 0.397000 0.930342 00:09
36 0.398930 0.396909 0.930346 00:09
37 0.399595 0.397107 0.930339 00:09
38 0.394634 0.396802 0.930350 00:09
39 0.396435 0.396742 0.930349 00:09
40 0.396221 0.396630 0.930352 00:09
41 0.395905 0.396587 0.930347 00:09
42 0.393728 0.396590 0.930347 00:09
43 0.398644 0.396219 0.930343 00:09
44 0.394760 0.396202 0.930347 00:09
45 0.391934 0.396160 0.930348 00:09
46 0.394949 0.396115 0.930348 00:09
47 0.397093 0.396080 0.930347 00:09
48 0.396542 0.396011 0.930352 00:09
49 0.394607 0.396006 0.930349 00:09
50 0.394683 0.395999 0.930350 00:09
Paths:  31223 , Tokens:  5473
[172, 372, 527, 1718, 1828, 2132, 2229, 2312, 2424, 2454, 2462, 2633, 3129, 3159, 3276, 3356, 3417, 3637, 3692, 4288, 4772, 5216, 5364, 5384, 5463]
Adj. Rand Index Score: 0.569727.
Adj. Mutual Info Score: 0.300430.
Classes:  [-1.  0.  1.] n_clusters:  2.0
Seed:  86420
epoch train_loss valid_loss accuracy time
1 4.146927 3.775892 0.502135 00:10
2 1.494770 1.353839 0.803985 00:09
3 0.638290 0.620994 0.896225 00:09
4 0.481302 0.461624 0.918550 00:09
5 0.431629 0.425730 0.928360 00:09
6 0.414731 0.418303 0.926090 00:09
7 0.415296 0.412551 0.927827 00:09
8 0.418562 0.408718 0.929322 00:09
9 0.412157 0.405847 0.930274 00:09
10 0.407669 0.407050 0.929999 00:09
11 0.410317 0.403627 0.930247 00:09
12 0.405673 0.403685 0.929896 00:09
13 0.407737 0.402763 0.929990 00:09
14 0.405613 0.402600 0.930111 00:09
15 0.401270 0.401451 0.929488 00:09
16 0.406464 0.401668 0.929865 00:09
17 0.403041 0.400119 0.930245 00:09
18 0.403965 0.399970 0.929900 00:09
19 0.407917 0.400582 0.930299 00:09
20 0.399261 0.399309 0.930315 00:09
21 0.400014 0.398915 0.930324 00:09
22 0.401977 0.399152 0.930340 00:09
23 0.404666 0.399027 0.930326 00:09
24 0.399983 0.398548 0.930323 00:09
25 0.396824 0.398832 0.930334 00:09
26 0.398344 0.398383 0.930326 00:09
27 0.403762 0.398199 0.930347 00:09
28 0.399212 0.397682 0.930336 00:09
29 0.402679 0.398606 0.929155 00:10
30 0.397036 0.399839 0.930348 00:10
31 0.398979 0.397741 0.930338 00:10
32 0.401586 0.397670 0.930346 00:10
33 0.401921 0.397491 0.930014 00:10
34 0.400832 0.397191 0.930041 00:10
35 0.396891 0.397476 0.930342 00:10
36 0.393959 0.397704 0.930343 00:09
37 0.399579 0.397164 0.930348 00:09
38 0.395272 0.397066 0.930346 00:09
39 0.398992 0.396885 0.930343 00:09
40 0.393460 0.396516 0.930352 00:09
41 0.394705 0.396534 0.930349 00:09
42 0.396734 0.396414 0.930350 00:09
43 0.393185 0.396388 0.930347 00:09
44 0.397647 0.396267 0.930347 00:09
45 0.395430 0.396138 0.930351 00:09
46 0.394379 0.396059 0.930350 00:09
47 0.399740 0.396013 0.930351 00:09
48 0.392651 0.395996 0.930349 00:09
49 0.395876 0.395977 0.930349 00:09
50 0.392402 0.395965 0.930350 00:09
Paths:  31223 , Tokens:  5473
[172, 372, 527, 1718, 1828, 2132, 2229, 2312, 2424, 2454, 2462, 2633, 3129, 3159, 3276, 3356, 3417, 3637, 3692, 4288, 4772, 5216, 5364, 5384, 5463]
Adj. Rand Index Score: 0.569727.
Adj. Mutual Info Score: 0.300430.
Classes:  [-1.  0.  1.] n_clusters:  2.0
In [325]:
perf_output
Out[325]:
[(0.7921222771673334, 0.6256005245618719),
 (0.6901459877245661, 0.55538929616125),
 (0.9510116641908538, 0.9099986699927317),
 (0.9510116641908538, 0.9099986699927317),
 (0.9510116641908538, 0.9099986699927317),
 (0.9510116641908538, 0.9099986699927317),
 (0.9510116641908538, 0.9099986699927317),
 (0.9510116641908538, 0.9099986699927317),
 (0.9510116641908538, 0.9099986699927317),
 (0.9510116641908538, 0.9099986699927317),
 (0.9510116641908538, 0.9099986699927317),
 (0.9510116641908538, 0.9099986699927317),
 (0.2833139524710691, 0.40271481855442615),
 (0.333800696792064, 0.44100083357096465),
 (0.29480471723393786, 0.3941229139711741),
 (0.29480471723393786, 0.3941229139711741),
 (0.29480471723393786, 0.3941229139711741),
 (0.29480471723393786, 0.3941229139711741),
 (0.29480471723393786, 0.3941229139711741),
 (0.29480471723393786, 0.3941229139711741),
 (0.29480471723393786, 0.3941229139711741),
 (0.29480471723393786, 0.3941229139711741),
 (0.29480471723393786, 0.3941229139711741),
 (0.29480471723393786, 0.3941229139711741),
 (0.539408633653917, 0.5476909123005289),
 (0.5681159420289855, 0.5767707120409254),
 (0.5437515388376617, 0.5425149956512012),
 (0.5437515388376617, 0.5425149956512012),
 (0.5437515388376617, 0.5425149956512012),
 (0.5437515388376617, 0.5425149956512012),
 (0.5437515388376617, 0.5425149956512012),
 (0.5437515388376617, 0.5425149956512012),
 (0.5437515388376617, 0.5425149956512012),
 (0.5437515388376617, 0.5425149956512012),
 (0.5437515388376617, 0.5425149956512012),
 (0.5437515388376617, 0.5425149956512012),
 (0.7519593275210339, 0.8118059294436889),
 (0.8151147523125724, 0.8386258098212307),
 (0.7701848596617481, 0.8265364192870966),
 (0.7680244646562355, 0.823527835562166),
 (0.7680244646562355, 0.823527835562166),
 (0.7680244646562355, 0.823527835562166),
 (0.7680244646562355, 0.823527835562166),
 (0.7680244646562355, 0.823527835562166),
 (0.7680244646562355, 0.823527835562166),
 (0.7680244646562355, 0.823527835562166),
 (0.7680244646562355, 0.823527835562166),
 (0.7680244646562355, 0.823527835562166),
 (0.41377319922472133, 0.5049529936482817),
 (0.752283458595894, 0.7071262724512921),
 (0.4031366998347653, 0.4732620761924827),
 (0.4031366998347653, 0.4732620761924827),
 (0.4031366998347653, 0.4732620761924827),
 (0.4031366998347653, 0.4732620761924827),
 (0.4031366998347653, 0.4732620761924827),
 (0.4031366998347653, 0.4732620761924827),
 (0.4031366998347653, 0.4732620761924827),
 (0.4031366998347653, 0.4732620761924827),
 (0.4031366998347653, 0.4732620761924827),
 (0.4031366998347653, 0.4732620761924827),
 (0.7600921529693362, 0.7798879234563406),
 (0.48951689214025595, 0.5846435579639829),
 (0.7583992973262297, 0.7772286287188804),
 (0.7583992973262297, 0.7772286287188804),
 (0.7583992973262297, 0.7772286287188804),
 (0.7583992973262297, 0.7772286287188804),
 (0.7583992973262297, 0.7772286287188804),
 (0.7583992973262297, 0.7772286287188804),
 (0.7583992973262297, 0.7772286287188804),
 (0.7583992973262297, 0.7772286287188804),
 (0.7583992973262297, 0.7772286287188804),
 (0.7583992973262297, 0.7772286287188804),
 (0.5770295286165554, 0.35415845100703636),
 (0.027815165702526644, 0.08921525083964738),
 (0.56972685784943, 0.30043006553295304),
 (0.56972685784943, 0.30043006553295304),
 (0.56972685784943, 0.30043006553295304),
 (0.56972685784943, 0.30043006553295304),
 (0.56972685784943, 0.30043006553295304),
 (0.56972685784943, 0.30043006553295304),
 (0.56972685784943, 0.30043006553295304),
 (0.56972685784943, 0.30043006553295304),
 (0.56972685784943, 0.30043006553295304),
 (0.56972685784943, 0.30043006553295304)]
In [326]:
import json

# Persist the (ARI, AMI) score tuples accumulated across all runs so they can
# be reloaded later without re-running the experiments.
with open('perf_out.txt', 'w') as filehandle:
    serialized = json.dumps(perf_output)
    filehandle.write(serialized)

Exploratory Data Analysis

Some exploratory data analysis to inspect the output of the dimensionality-reduction techniques (PCA and UMAP):

In [254]:
# Pick exactly one dataset to analyse; the others stay commented out for quick switching.
#data0 = pd.read_csv("data_sets/00_bunch/banknote/banknote.csv", header=None)
#data0 = pd.read_csv("data_sets/00_bunch/glass/glass.csv", header=None)
#data0 = pd.read_csv("data_sets/00_bunch/iris/iris.csv", header=None)
#data0 = pd.read_csv("data_sets/00_bunch/mnist/mnist.csv", header=None)
data0 = pd.read_csv("data_sets/00_bunch/seeds/seeds.csv", header=None)
#data0 = pd.read_csv("data_sets/01_bunch/phoneme/phoneme.csv", header=None)
#data0 = pd.read_csv("data_sets/02_bunch/page-blocks/page-blocks.csv", header=None)

# Convention in these CSVs: the last column is the class label, everything
# before it is the feature matrix.
feature_cols = data0.columns[:-1]
label_col = data0.columns[-1]
X = data0[feature_cols].to_numpy()
y = data0[label_col].to_numpy()
X.shape, y.shape
Out[254]:
((210, 7), (210,))
In [137]:
# `PCA` is not imported by the notebook's top import cell (only `sklearn`,
# `sklearn.datasets` and `sklearn.cluster` are), so this cell fails on a fresh
# Restart-&-Run-All; import it locally to keep the cell self-contained.
from sklearn.decomposition import PCA

# Project the feature matrix onto its first 4 principal components.
pca_trans = PCA(n_components=4).fit(X)
clusterable_embedding = pca_trans.transform(X)
In [134]:
np.sum(pca_trans.explained_variance_ratio_)
Out[134]:
0.7719867961314373
In [255]:
import umap  # already imported at the top of the notebook; repeated here so the cell stands alone

# Non-linear embedding tuned for clustering: a large n_neighbors emphasises
# global structure, while min_dist=0.0 lets points pack tightly inside
# clusters; random_state=0 keeps the projection reproducible.
reducer = umap.UMAP(n_neighbors=50, min_dist=0.0, n_components=4, random_state=0)
clusterable_embedding = reducer.fit_transform(X)
In [256]:
#clusterable_embedding = X
# Scatter of the first two embedding dimensions, coloured by the ground-truth
# class labels `y`; explicit figure/axes interface, trailing ';' suppresses
# the repr output.
fig, ax = plt.subplots(figsize=fig_size)
ax.scatter(clusterable_embedding[:, 0], clusterable_embedding[:, 1],
           c=y, s=50, cmap='jet');
In [257]:
#clusterable_embedding = X
# Same scatter as above but coloured by `y_hat`.
# NOTE(review): `y_hat` is not defined in any visible cell — it presumably
# holds predicted cluster labels from an earlier run; confirm it exists
# before a fresh Restart-&-Run-All.
fig, ax = plt.subplots(figsize=fig_size)
ax.scatter(clusterable_embedding[:, 0], clusterable_embedding[:, 1],
           c=y_hat, s=50, cmap='jet');